commit 2f47668422

.coveragerc

@@ -1,12 +1,17 @@
# Sorted by hassfest.
#
# To sort, run python3 -m script.hassfest -p coverage

[run]
source = homeassistant
omit =
homeassistant/__main__.py
homeassistant/helpers/backports/aiohttp_resolver.py
homeassistant/helpers/signal.py
homeassistant/scripts/__init__.py
homeassistant/scripts/benchmark/__init__.py
homeassistant/scripts/check_config.py
homeassistant/scripts/ensure_config.py
homeassistant/scripts/benchmark/__init__.py
homeassistant/scripts/macos/__init__.py

# omit pieces of code that rely on external devices being present

@@ -103,10 +108,8 @@ omit =
homeassistant/components/aurora/sensor.py
homeassistant/components/avea/light.py
homeassistant/components/avion/light.py
homeassistant/components/azure_devops/__init__.py
homeassistant/components/azure_devops/sensor.py
homeassistant/components/azure_service_bus/*
homeassistant/components/awair/coordinator.py
homeassistant/components/azure_service_bus/*
homeassistant/components/baf/__init__.py
homeassistant/components/baf/climate.py
homeassistant/components/baf/entity.py

@@ -190,8 +193,8 @@ omit =
homeassistant/components/comelit/alarm_control_panel.py
homeassistant/components/comelit/climate.py
homeassistant/components/comelit/const.py
homeassistant/components/comelit/cover.py
homeassistant/components/comelit/coordinator.py
homeassistant/components/comelit/cover.py
homeassistant/components/comelit/humidifier.py
homeassistant/components/comelit/light.py
homeassistant/components/comelit/sensor.py

@@ -239,8 +242,8 @@ omit =
homeassistant/components/dominos/*
homeassistant/components/doods/*
homeassistant/components/doorbird/__init__.py
homeassistant/components/doorbird/camera.py
homeassistant/components/doorbird/button.py
homeassistant/components/doorbird/camera.py
homeassistant/components/doorbird/device.py
homeassistant/components/doorbird/entity.py
homeassistant/components/doorbird/util.py

@@ -260,12 +263,12 @@ omit =
homeassistant/components/dunehd/__init__.py
homeassistant/components/dunehd/media_player.py
homeassistant/components/duotecno/__init__.py
homeassistant/components/duotecno/entity.py
homeassistant/components/duotecno/switch.py
homeassistant/components/duotecno/cover.py
homeassistant/components/duotecno/light.py
homeassistant/components/duotecno/climate.py
homeassistant/components/duotecno/binary_sensor.py
homeassistant/components/duotecno/climate.py
homeassistant/components/duotecno/cover.py
homeassistant/components/duotecno/entity.py
homeassistant/components/duotecno/light.py
homeassistant/components/duotecno/switch.py
homeassistant/components/dwd_weather_warnings/const.py
homeassistant/components/dwd_weather_warnings/coordinator.py
homeassistant/components/dwd_weather_warnings/sensor.py

@@ -305,10 +308,12 @@ omit =
homeassistant/components/edl21/__init__.py
homeassistant/components/edl21/sensor.py
homeassistant/components/egardia/*
homeassistant/components/electrasmart/__init__.py
homeassistant/components/electrasmart/climate.py
homeassistant/components/electric_kiwi/__init__.py
homeassistant/components/electric_kiwi/api.py
homeassistant/components/electric_kiwi/oauth2.py
homeassistant/components/electric_kiwi/coordinator.py
homeassistant/components/electric_kiwi/oauth2.py
homeassistant/components/electric_kiwi/select.py
homeassistant/components/eliqonline/sensor.py
homeassistant/components/elkm1/__init__.py

@@ -356,12 +361,18 @@ omit =
homeassistant/components/environment_canada/weather.py
homeassistant/components/envisalink/*
homeassistant/components/ephember/climate.py
homeassistant/components/epic_games_store/__init__.py
homeassistant/components/epic_games_store/coordinator.py
homeassistant/components/epion/__init__.py
homeassistant/components/epion/coordinator.py
homeassistant/components/epion/sensor.py
homeassistant/components/epson/__init__.py
homeassistant/components/epson/media_player.py
homeassistant/components/epsonworkforce/sensor.py
homeassistant/components/eq3btsmart/__init__.py
homeassistant/components/eq3btsmart/climate.py
homeassistant/components/eq3btsmart/const.py
homeassistant/components/eq3btsmart/entity.py
homeassistant/components/eq3btsmart/models.py
homeassistant/components/escea/__init__.py
homeassistant/components/escea/climate.py
homeassistant/components/escea/discovery.py

@@ -376,11 +387,11 @@ omit =
homeassistant/components/ezviz/binary_sensor.py
homeassistant/components/ezviz/button.py
homeassistant/components/ezviz/camera.py
homeassistant/components/ezviz/coordinator.py
homeassistant/components/ezviz/entity.py
homeassistant/components/ezviz/image.py
homeassistant/components/ezviz/light.py
homeassistant/components/ezviz/coordinator.py
homeassistant/components/ezviz/number.py
homeassistant/components/ezviz/entity.py
homeassistant/components/ezviz/select.py
homeassistant/components/ezviz/sensor.py
homeassistant/components/ezviz/siren.py

@@ -529,16 +540,13 @@ omit =
homeassistant/components/hive/switch.py
homeassistant/components/hive/water_heater.py
homeassistant/components/hko/__init__.py
homeassistant/components/hko/weather.py
homeassistant/components/hko/coordinator.py
homeassistant/components/hko/weather.py
homeassistant/components/hlk_sw16/__init__.py
homeassistant/components/hlk_sw16/switch.py
homeassistant/components/home_connect/__init__.py
homeassistant/components/home_connect/api.py
homeassistant/components/home_connect/binary_sensor.py
homeassistant/components/home_connect/entity.py
homeassistant/components/home_connect/light.py
homeassistant/components/home_connect/sensor.py
homeassistant/components/home_connect/switch.py
homeassistant/components/homematic/__init__.py
homeassistant/components/homematic/binary_sensor.py

@@ -568,9 +576,9 @@ omit =
homeassistant/components/hunterdouglas_powerview/sensor.py
homeassistant/components/hunterdouglas_powerview/shade_data.py
homeassistant/components/hunterdouglas_powerview/util.py
homeassistant/components/hvv_departures/__init__.py
homeassistant/components/huum/__init__.py
homeassistant/components/huum/climate.py
homeassistant/components/hvv_departures/__init__.py
homeassistant/components/hvv_departures/binary_sensor.py
homeassistant/components/hvv_departures/sensor.py
homeassistant/components/ialarm/alarm_control_panel.py

@@ -663,9 +671,9 @@ omit =
homeassistant/components/keyboard/*
homeassistant/components/keyboard_remote/*
homeassistant/components/keymitt_ble/__init__.py
homeassistant/components/keymitt_ble/coordinator.py
homeassistant/components/keymitt_ble/entity.py
homeassistant/components/keymitt_ble/switch.py
homeassistant/components/keymitt_ble/coordinator.py
homeassistant/components/kitchen_sink/weather.py
homeassistant/components/kiwi/lock.py
homeassistant/components/kodi/__init__.py

@@ -733,6 +741,7 @@ omit =
homeassistant/components/lutron/binary_sensor.py
homeassistant/components/lutron/cover.py
homeassistant/components/lutron/entity.py
homeassistant/components/lutron/event.py
homeassistant/components/lutron/fan.py
homeassistant/components/lutron/light.py
homeassistant/components/lutron/switch.py

@@ -836,8 +845,15 @@ omit =
homeassistant/components/mysensors/switch.py
homeassistant/components/mystrom/binary_sensor.py
homeassistant/components/mystrom/light.py
homeassistant/components/mystrom/switch.py
homeassistant/components/mystrom/sensor.py
homeassistant/components/mystrom/switch.py
homeassistant/components/myuplink/__init__.py
homeassistant/components/myuplink/api.py
homeassistant/components/myuplink/application_credentials.py
homeassistant/components/myuplink/coordinator.py
homeassistant/components/myuplink/entity.py
homeassistant/components/myuplink/helpers.py
homeassistant/components/myuplink/sensor.py
homeassistant/components/nad/media_player.py
homeassistant/components/nanoleaf/__init__.py
homeassistant/components/nanoleaf/button.py

@@ -845,13 +861,13 @@ omit =
homeassistant/components/nanoleaf/light.py
homeassistant/components/neato/__init__.py
homeassistant/components/neato/api.py
homeassistant/components/neato/button.py
homeassistant/components/neato/camera.py
homeassistant/components/neato/entity.py
homeassistant/components/neato/hub.py
homeassistant/components/neato/sensor.py
homeassistant/components/neato/switch.py
homeassistant/components/neato/vacuum.py
homeassistant/components/neato/button.py
homeassistant/components/nederlandse_spoorwegen/sensor.py
homeassistant/components/netdata/sensor.py
homeassistant/components/netgear/__init__.py

@@ -961,15 +977,16 @@ omit =
homeassistant/components/openweathermap/weather.py
homeassistant/components/openweathermap/weather_update_coordinator.py
homeassistant/components/opnsense/__init__.py
homeassistant/components/opnsense/device_tracker.py
homeassistant/components/opower/__init__.py
homeassistant/components/opower/coordinator.py
homeassistant/components/opower/sensor.py
homeassistant/components/opnsense/device_tracker.py
homeassistant/components/opple/light.py
homeassistant/components/oru/*
homeassistant/components/orvibo/switch.py
homeassistant/components/osoenergy/__init__.py
homeassistant/components/osoenergy/const.py
homeassistant/components/osoenergy/sensor.py
homeassistant/components/osoenergy/water_heater.py
homeassistant/components/osramlightify/light.py
homeassistant/components/otp/sensor.py

@@ -1091,17 +1108,6 @@ omit =
homeassistant/components/rainmachine/switch.py
homeassistant/components/rainmachine/update.py
homeassistant/components/rainmachine/util.py
homeassistant/components/renson/__init__.py
homeassistant/components/renson/const.py
homeassistant/components/renson/coordinator.py
homeassistant/components/renson/entity.py
homeassistant/components/renson/sensor.py
homeassistant/components/renson/button.py
homeassistant/components/renson/fan.py
homeassistant/components/renson/switch.py
homeassistant/components/renson/binary_sensor.py
homeassistant/components/renson/number.py
homeassistant/components/renson/time.py
homeassistant/components/raspyrfm/*
homeassistant/components/recollect_waste/sensor.py
homeassistant/components/recorder/repack.py

@@ -1116,6 +1122,17 @@ omit =
homeassistant/components/rejseplanen/sensor.py
homeassistant/components/remember_the_milk/__init__.py
homeassistant/components/remote_rpi_gpio/*
homeassistant/components/renson/__init__.py
homeassistant/components/renson/binary_sensor.py
homeassistant/components/renson/button.py
homeassistant/components/renson/const.py
homeassistant/components/renson/coordinator.py
homeassistant/components/renson/entity.py
homeassistant/components/renson/fan.py
homeassistant/components/renson/number.py
homeassistant/components/renson/sensor.py
homeassistant/components/renson/switch.py
homeassistant/components/renson/time.py
homeassistant/components/reolink/binary_sensor.py
homeassistant/components/reolink/button.py
homeassistant/components/reolink/camera.py

@@ -1141,8 +1158,10 @@ omit =
homeassistant/components/roborock/coordinator.py
homeassistant/components/rocketchat/notify.py
homeassistant/components/romy/__init__.py
homeassistant/components/romy/binary_sensor.py
homeassistant/components/romy/coordinator.py
homeassistant/components/romy/entity.py
homeassistant/components/romy/sensor.py
homeassistant/components/romy/vacuum.py
homeassistant/components/roomba/__init__.py
homeassistant/components/roomba/binary_sensor.py

@@ -1159,23 +1178,24 @@ omit =
homeassistant/components/route53/*
homeassistant/components/rpi_camera/*
homeassistant/components/rtorrent/sensor.py
homeassistant/components/russound_rio/media_player.py
homeassistant/components/russound_rnet/media_player.py
homeassistant/components/ruuvi_gateway/__init__.py
homeassistant/components/ruuvi_gateway/bluetooth.py
homeassistant/components/ruuvi_gateway/coordinator.py
homeassistant/components/russound_rio/media_player.py
homeassistant/components/russound_rnet/media_player.py
homeassistant/components/rympro/__init__.py
homeassistant/components/rympro/coordinator.py
homeassistant/components/rympro/sensor.py
homeassistant/components/sabnzbd/__init__.py
homeassistant/components/sabnzbd/coordinator.py
homeassistant/components/sabnzbd/sensor.py
homeassistant/components/saj/sensor.py
homeassistant/components/satel_integra/*
homeassistant/components/schluter/*
homeassistant/components/screenlogic/binary_sensor.py
homeassistant/components/screenlogic/climate.py
homeassistant/components/screenlogic/coordinator.py
homeassistant/components/screenlogic/const.py
homeassistant/components/screenlogic/coordinator.py
homeassistant/components/screenlogic/entity.py
homeassistant/components/screenlogic/light.py
homeassistant/components/screenlogic/number.py

@@ -1250,8 +1270,8 @@ omit =
homeassistant/components/solaredge/coordinator.py
homeassistant/components/solaredge_local/sensor.py
homeassistant/components/solarlog/__init__.py
homeassistant/components/solarlog/sensor.py
homeassistant/components/solarlog/coordinator.py
homeassistant/components/solarlog/sensor.py
homeassistant/components/solax/__init__.py
homeassistant/components/solax/sensor.py
homeassistant/components/soma/__init__.py

@@ -1288,14 +1308,6 @@ omit =
homeassistant/components/squeezebox/__init__.py
homeassistant/components/squeezebox/browse_media.py
homeassistant/components/squeezebox/media_player.py
homeassistant/components/starlink/__init__.py
homeassistant/components/starlink/binary_sensor.py
homeassistant/components/starlink/button.py
homeassistant/components/starlink/coordinator.py
homeassistant/components/starlink/device_tracker.py
homeassistant/components/starlink/sensor.py
homeassistant/components/starlink/switch.py
homeassistant/components/starlink/time.py
homeassistant/components/starline/__init__.py
homeassistant/components/starline/account.py
homeassistant/components/starline/binary_sensor.py

@@ -1306,6 +1318,14 @@ omit =
homeassistant/components/starline/sensor.py
homeassistant/components/starline/switch.py
homeassistant/components/starlingbank/sensor.py
homeassistant/components/starlink/__init__.py
homeassistant/components/starlink/binary_sensor.py
homeassistant/components/starlink/button.py
homeassistant/components/starlink/coordinator.py
homeassistant/components/starlink/device_tracker.py
homeassistant/components/starlink/sensor.py
homeassistant/components/starlink/switch.py
homeassistant/components/starlink/time.py
homeassistant/components/steam_online/sensor.py
homeassistant/components/stiebel_eltron/*
homeassistant/components/stookalert/__init__.py

@@ -1349,9 +1369,9 @@ omit =
homeassistant/components/switchbot/entity.py
homeassistant/components/switchbot/humidifier.py
homeassistant/components/switchbot/light.py
homeassistant/components/switchbot/lock.py
homeassistant/components/switchbot/sensor.py
homeassistant/components/switchbot/switch.py
homeassistant/components/switchbot/lock.py
homeassistant/components/switchbot_cloud/climate.py
homeassistant/components/switchbot_cloud/coordinator.py
homeassistant/components/switchbot_cloud/entity.py

@@ -1391,11 +1411,6 @@ omit =
homeassistant/components/tado/water_heater.py
homeassistant/components/tami4/button.py
homeassistant/components/tank_utility/sensor.py
homeassistant/components/tankerkoenig/__init__.py
homeassistant/components/tankerkoenig/binary_sensor.py
homeassistant/components/tankerkoenig/coordinator.py
homeassistant/components/tankerkoenig/entity.py
homeassistant/components/tankerkoenig/sensor.py
homeassistant/components/tapsaff/binary_sensor.py
homeassistant/components/tautulli/__init__.py
homeassistant/components/tautulli/coordinator.py

@@ -1464,6 +1479,7 @@ omit =
homeassistant/components/traccar_server/device_tracker.py
homeassistant/components/traccar_server/entity.py
homeassistant/components/traccar_server/helpers.py
homeassistant/components/traccar_server/sensor.py
homeassistant/components/tractive/__init__.py
homeassistant/components/tractive/binary_sensor.py
homeassistant/components/tractive/device_tracker.py

@@ -1511,9 +1527,9 @@ omit =
homeassistant/components/ue_smart_radio/media_player.py
homeassistant/components/ukraine_alarm/__init__.py
homeassistant/components/ukraine_alarm/binary_sensor.py
homeassistant/components/unifiled/*
homeassistant/components/unifi_direct/__init__.py
homeassistant/components/unifi_direct/device_tracker.py
homeassistant/components/unifiled/*
homeassistant/components/upb/__init__.py
homeassistant/components/upb/light.py
homeassistant/components/upc_connect/*

@@ -1523,7 +1539,6 @@ omit =
homeassistant/components/upnp/__init__.py
homeassistant/components/upnp/device.py
homeassistant/components/upnp/sensor.py
homeassistant/components/vasttrafik/sensor.py
homeassistant/components/v2c/__init__.py
homeassistant/components/v2c/binary_sensor.py
homeassistant/components/v2c/coordinator.py

@@ -1531,6 +1546,7 @@ omit =
homeassistant/components/v2c/number.py
homeassistant/components/v2c/sensor.py
homeassistant/components/v2c/switch.py
homeassistant/components/vasttrafik/sensor.py
homeassistant/components/velbus/__init__.py
homeassistant/components/velbus/binary_sensor.py
homeassistant/components/velbus/button.py

@@ -1538,8 +1554,8 @@ omit =
homeassistant/components/velbus/cover.py
homeassistant/components/velbus/entity.py
homeassistant/components/velbus/light.py
homeassistant/components/velbus/sensor.py
homeassistant/components/velbus/select.py
homeassistant/components/velbus/sensor.py
homeassistant/components/velbus/switch.py
homeassistant/components/velux/__init__.py
homeassistant/components/velux/cover.py

@@ -1705,12 +1721,12 @@ omit =
homeassistant/components/zeversolar/coordinator.py
homeassistant/components/zeversolar/entity.py
homeassistant/components/zeversolar/sensor.py
homeassistant/components/zha/websocket_api.py
homeassistant/components/zha/core/cluster_handlers/*
homeassistant/components/zha/core/device.py
homeassistant/components/zha/core/gateway.py
homeassistant/components/zha/core/helpers.py
homeassistant/components/zha/light.py
homeassistant/components/zha/websocket_api.py
homeassistant/components/zhong_hong/climate.py
homeassistant/components/ziggo_mediabox_xl/media_player.py
homeassistant/components/zoneminder/*

@@ -1727,15 +1743,6 @@ omit =
homeassistant/components/zwave_me/sensor.py
homeassistant/components/zwave_me/siren.py
homeassistant/components/zwave_me/switch.py
homeassistant/components/electrasmart/climate.py
homeassistant/components/electrasmart/__init__.py
homeassistant/components/myuplink/__init__.py
homeassistant/components/myuplink/api.py
homeassistant/components/myuplink/application_credentials.py
homeassistant/components/myuplink/coordinator.py
homeassistant/components/myuplink/entity.py
homeassistant/components/myuplink/helpers.py
homeassistant/components/myuplink/sensor.py

[report]

@@ -4,7 +4,10 @@
"dockerFile": "../Dockerfile.dev",
"postCreateCommand": "script/setup",
"postStartCommand": "script/bootstrap",
"containerEnv": { "DEVCONTAINER": "1" },
"containerEnv": {
"DEVCONTAINER": "1",
"PYTHONASYNCIODEBUG": "1"
},
// Port 5683 udp is used by Shelly integration
"appPort": ["8123:8123", "5683:5683/udp"],
"runArgs": ["-e", "GIT_EDITOR=code --wait"],

@@ -27,7 +27,7 @@ jobs:
publish: ${{ steps.version.outputs.publish }}
steps:
- name: Checkout the repository
uses: actions/checkout@v4.1.2
uses: actions/checkout@v4.1.3
with:
fetch-depth: 0

@@ -69,7 +69,7 @@ jobs:
run: find ./homeassistant/components/*/translations -name "*.json" | tar zcvf translations.tar.gz -T -

- name: Upload translations
uses: actions/upload-artifact@v4.3.1
uses: actions/upload-artifact@v4.3.3
with:
name: translations
path: translations.tar.gz

@@ -90,7 +90,7 @@ jobs:
arch: ${{ fromJson(needs.init.outputs.architectures) }}
steps:
- name: Checkout the repository
uses: actions/checkout@v4.1.2
uses: actions/checkout@v4.1.3

- name: Download nightly wheels of frontend
if: needs.init.outputs.channel == 'dev'

@@ -174,17 +174,8 @@ jobs:
sed -i "s|pyezviz|# pyezviz|g" requirements_all.txt
sed -i "s|pykrakenapi|# pykrakenapi|g" requirements_all.txt

- name: Adjustments for 64-bit
if: matrix.arch == 'amd64' || matrix.arch == 'aarch64'
run: |
# Some speedups are only available on 64-bit, and since
# we build 32bit images on 64bit hosts, we only enable
# the speed ups on 64bit since the wheels for 32bit
# are not available.
sed -i "s|aiohttp-zlib-ng|aiohttp-zlib-ng\[isal\]|g" requirements_all.txt

- name: Download translations
uses: actions/download-artifact@v4.1.4
uses: actions/download-artifact@v4.1.6
with:
name: translations

@@ -251,7 +242,7 @@ jobs:
- green
steps:
- name: Checkout the repository
uses: actions/checkout@v4.1.2
uses: actions/checkout@v4.1.3

- name: Set build additional args
run: |

@@ -288,7 +279,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout the repository
uses: actions/checkout@v4.1.2
uses: actions/checkout@v4.1.3

- name: Initialize git
uses: home-assistant/actions/helpers/git-init@master

@@ -329,7 +320,7 @@ jobs:
registry: ["ghcr.io/home-assistant", "docker.io/homeassistant"]
steps:
- name: Checkout the repository
uses: actions/checkout@v4.1.2
uses: actions/checkout@v4.1.3

- name: Install Cosign
uses: sigstore/cosign-installer@v3.4.0

@@ -459,7 +450,7 @@ jobs:
if: github.repository_owner == 'home-assistant' && needs.init.outputs.publish == 'true'
steps:
- name: Checkout the repository
uses: actions/checkout@v4.1.2
uses: actions/checkout@v4.1.3

- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v5.1.0

@@ -467,7 +458,7 @@ jobs:
python-version: ${{ env.DEFAULT_PYTHON }}

- name: Download translations
uses: actions/download-artifact@v4.1.4
uses: actions/download-artifact@v4.1.6
with:
name: translations

@@ -33,10 +33,10 @@ on:
type: boolean

env:
CACHE_VERSION: 5
CACHE_VERSION: 8
UV_CACHE_VERSION: 1
MYPY_CACHE_VERSION: 8
HA_SHORT_VERSION: "2024.4"
HA_SHORT_VERSION: "2024.5"
DEFAULT_PYTHON: "3.12"
ALL_PYTHON_VERSIONS: "['3.12']"
# 10.3 is the oldest supported version

@@ -89,14 +89,16 @@ jobs:
runs-on: ubuntu-22.04
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.1.2
uses: actions/checkout@v4.1.3
- name: Generate partial Python venv restore key
id: generate_python_cache_key
run: >-
echo "key=venv-${{ env.CACHE_VERSION }}-${{
hashFiles('requirements_test.txt') }}-${{
hashFiles('requirements_test.txt', 'requirements_test_pre_commit.txt') }}-${{
hashFiles('requirements.txt') }}-${{
hashFiles('requirements_all.txt') }}-${{
hashFiles('homeassistant/package_constraints.txt') }}" >> $GITHUB_OUTPUT
hashFiles('homeassistant/package_constraints.txt') }}-${{
hashFiles('script/gen_requirements_all.py') }}" >> $GITHUB_OUTPUT
- name: Generate partial pre-commit restore key
id: generate_pre-commit_cache_key
run: >-

@@ -222,7 +224,7 @@ jobs:
- info
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.1.2
uses: actions/checkout@v4.1.3
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
uses: actions/setup-python@v5.1.0

@@ -268,7 +270,7 @@ jobs:
- pre-commit
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.1.2
uses: actions/checkout@v4.1.3
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v5.1.0
id: python

@@ -308,7 +310,7 @@ jobs:
- pre-commit
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.1.2
uses: actions/checkout@v4.1.3
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v5.1.0
id: python

@@ -347,7 +349,7 @@ jobs:
- pre-commit
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.1.2
uses: actions/checkout@v4.1.3
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v5.1.0
id: python

@@ -441,7 +443,7 @@ jobs:
python-version: ${{ fromJSON(needs.info.outputs.python_versions) }}
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.1.2
uses: actions/checkout@v4.1.3
- name: Set up Python ${{ matrix.python-version }}
id: python
uses: actions/setup-python@v5.1.0

@@ -450,8 +452,10 @@ jobs:
check-latest: true
- name: Generate partial uv restore key
id: generate-uv-key
run: >-
echo "key=uv-${{ env.UV_CACHE_VERSION }}-${{
run: |
uv_version=$(cat requirements_test.txt | grep uv | cut -d '=' -f 3)
echo "version=${uv_version}" >> $GITHUB_OUTPUT
echo "key=uv-${{ env.UV_CACHE_VERSION }}-${uv_version}-${{
env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
- name: Restore base Python virtual environment
id: cache-venv

@@ -471,10 +475,13 @@ jobs:
${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
steps.generate-uv-key.outputs.key }}
restore-keys: |
${{ runner.os }}-${{ steps.python.outputs.python-version }}-uv-${{ env.UV_CACHE_VERSION }}-${{ env.HA_SHORT_VERSION }}-
${{ runner.os }}-${{ steps.python.outputs.python-version }}-uv-${{
env.UV_CACHE_VERSION }}-${{ steps.generate-uv-key.outputs.version }}-${{
env.HA_SHORT_VERSION }}-
- name: Install additional OS dependencies
if: steps.cache-venv.outputs.cache-hit != 'true'
run: |
sudo rm /etc/apt/sources.list.d/microsoft-prod.list
sudo apt-get update
sudo apt-get -y install \
bluez \

@@ -484,6 +491,7 @@ jobs:
libavfilter-dev \
libavformat-dev \
libavutil-dev \
libgammu-dev \
libswresample-dev \
libswscale-dev \
libudev-dev

@@ -495,7 +503,9 @@ jobs:
python --version
pip install "$(grep '^uv' < requirements_test.txt)"
uv pip install -U "pip>=21.3.1" setuptools wheel
uv pip install -r requirements_all.txt
uv pip install -r requirements.txt
python -m script.gen_requirements_all ci
uv pip install -r requirements_all_pytest.txt
uv pip install -r requirements_test.txt
uv pip install -e . --config-settings editable_mode=compat

@@ -510,7 +520,7 @@ jobs:
- base
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.1.2
uses: actions/checkout@v4.1.3
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
uses: actions/setup-python@v5.1.0

@@ -542,7 +552,7 @@ jobs:
- base
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.1.2
uses: actions/checkout@v4.1.3
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
uses: actions/setup-python@v5.1.0

@@ -575,7 +585,7 @@ jobs:
- base
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.1.2
uses: actions/checkout@v4.1.3
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
uses: actions/setup-python@v5.1.0

@@ -619,7 +629,7 @@ jobs:
- base
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.1.2
uses: actions/checkout@v4.1.3
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
uses: actions/setup-python@v5.1.0

@@ -670,14 +680,63 @@ jobs:
python --version
mypy homeassistant/components/${{ needs.info.outputs.integrations_glob }}

pytest:
prepare-pytest-full:
runs-on: ubuntu-22.04
if: |
(github.event_name != 'push' || github.event.repository.full_name == 'home-assistant/core')
&& github.event.inputs.lint-only != 'true'
&& github.event.inputs.pylint-only != 'true'
&& github.event.inputs.mypy-only != 'true'
&& (needs.info.outputs.test_full_suite == 'true' || needs.info.outputs.tests_glob)
&& needs.info.outputs.test_full_suite == 'true'
needs:
- info
- base
name: Split tests for full run
steps:
- name: Install additional OS dependencies
run: |
sudo rm /etc/apt/sources.list.d/microsoft-prod.list
sudo apt-get update
sudo apt-get -y install \
bluez \
ffmpeg \
libgammu-dev
- name: Check out code from GitHub
uses: actions/checkout@v4.1.3
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
uses: actions/setup-python@v5.1.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}
check-latest: true
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache/restore@v4.0.2
with:
path: venv
fail-on-cache-miss: true
key: >-
${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
needs.info.outputs.python_cache_key }}
- name: Run split_tests.py
run: |
. venv/bin/activate
python -m script.split_tests ${{ needs.info.outputs.test_group_count }} tests
- name: Upload pytest_buckets
uses: actions/upload-artifact@v4.3.3
with:
name: pytest_buckets
path: pytest_buckets.txt
overwrite: true

pytest-full:
runs-on: ubuntu-22.04
if: |
(github.event_name != 'push' || github.event.repository.full_name == 'home-assistant/core')
&& github.event.inputs.lint-only != 'true'
&& github.event.inputs.pylint-only != 'true'
&& github.event.inputs.mypy-only != 'true'
&& needs.info.outputs.test_full_suite == 'true'
needs:
- info
- base

@@ -686,6 +745,7 @@ jobs:
- lint-other
- lint-ruff
- mypy
- prepare-pytest-full
strategy:
fail-fast: false
matrix:

@@ -696,12 +756,14 @@ jobs:
steps:
- name: Install additional OS dependencies
run: |
sudo rm /etc/apt/sources.list.d/microsoft-prod.list
sudo apt-get update
sudo apt-get -y install \
bluez \
ffmpeg
ffmpeg \
libgammu-dev
- name: Check out code from GitHub
uses: actions/checkout@v4.1.2
uses: actions/checkout@v4.1.3
- name: Set up Python ${{ matrix.python-version }}
id: python
uses: actions/setup-python@v5.1.0

@@ -722,12 +784,15 @@ jobs:
- name: Register pytest slow test problem matcher
run: |
echo "::add-matcher::.github/workflows/matchers/pytest-slow.json"
- name: Download pytest_buckets
uses: actions/download-artifact@v4.1.6
with:
name: pytest_buckets
- name: Compile English translations
run: |
. venv/bin/activate
python3 -m script.translations develop --all
- name: Run pytest (fully)
if: needs.info.outputs.test_full_suite == 'true'
- name: Run pytest
timeout-minutes: 60
id: pytest-full
env:

@@ -748,62 +813,27 @@ jobs:
--durations=10 \
-n auto \
--dist=loadfile \
--test-group-count ${{ needs.info.outputs.test_group_count }} \
--test-group=${{ matrix.group }} \
${cov_params[@]} \
-o console_output_style=count \
-p no:sugar \
tests \
2>&1 | tee pytest-${{ matrix.python-version }}-${{ matrix.group }}.txt
- name: Run pytest (partially)
if: needs.info.outputs.test_full_suite == 'false'
timeout-minutes: 10
id: pytest-partial
shell: bash
env:
PYTHONDONTWRITEBYTECODE: 1
run: |
. venv/bin/activate
python --version
set -o pipefail

if [[ ! -f "tests/components/${{ matrix.group }}/__init__.py" ]]; then
echo "::error:: missing file tests/components/${{ matrix.group }}/__init__.py"
exit 1
fi

cov_params=()
if [[ "${{ needs.info.outputs.skip_coverage }}" != "true" ]]; then
cov_params+=(--cov="homeassistant.components.${{ matrix.group }}")
cov_params+=(--cov-report=xml)
cov_params+=(--cov-report=term-missing)
fi

python3 -b -X dev -m pytest \
-qq \
--timeout=9 \
-n auto \
${cov_params[@]} \
-o console_output_style=count \
--durations=0 \
--durations-min=1 \
-p no:sugar \
tests/components/${{ matrix.group }} \
$(sed -n "${{ matrix.group }},1p" pytest_buckets.txt) \
2>&1 | tee pytest-${{ matrix.python-version }}-${{ matrix.group }}.txt
- name: Upload pytest output
if: success() || failure() && (steps.pytest-full.conclusion == 'failure' || steps.pytest-partial.conclusion == 'failure')
uses: actions/upload-artifact@v4.3.1
if: success() || failure() && steps.pytest-full.conclusion == 'failure'
uses: actions/upload-artifact@v4.3.3
with:
name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ matrix.group }}
path: pytest-*.txt
overwrite: true
- name: Upload coverage artifact
if: needs.info.outputs.skip_coverage != 'true'
uses: actions/upload-artifact@v4.3.1
uses: actions/upload-artifact@v4.3.3
with:
name: coverage-${{ matrix.python-version }}-${{ matrix.group }}
path: coverage.xml
overwrite: true
- name: Remove pytest_buckets
run: rm pytest_buckets.txt
- name: Check dirty
run: |
./script/check_dirty

@@ -842,13 +872,14 @@ jobs:
steps:
- name: Install additional OS dependencies
run: |
sudo rm /etc/apt/sources.list.d/microsoft-prod.list
sudo apt-get update
sudo apt-get -y install \
bluez \
ffmpeg \
libmariadb-dev-compat
- name: Check out code from GitHub
uses: actions/checkout@v4.1.2
uses: actions/checkout@v4.1.3
- name: Set up Python ${{ matrix.python-version }}
id: python
uses: actions/setup-python@v5.1.0

@@ -912,7 +943,7 @@ jobs:
2>&1 | tee pytest-${{ matrix.python-version }}-${mariadb}.txt
- name: Upload pytest output
if: success() || failure() && steps.pytest-partial.conclusion == 'failure'
uses: actions/upload-artifact@v4.3.1
uses: actions/upload-artifact@v4.3.3
with:
name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{
steps.pytest-partial.outputs.mariadb }}

@@ -920,7 +951,7 @@ jobs:
overwrite: true
- name: Upload coverage artifact
if: needs.info.outputs.skip_coverage != 'true'
uses: actions/upload-artifact@v4.3.1
uses: actions/upload-artifact@v4.3.3
with:
name: coverage-${{ matrix.python-version }}-${{
steps.pytest-partial.outputs.mariadb }}

@@ -964,13 +995,14 @@ jobs:
steps:
- name: Install additional OS dependencies
run: |
sudo rm /etc/apt/sources.list.d/microsoft-prod.list
sudo apt-get update
sudo apt-get -y install \
bluez \
ffmpeg \
postgresql-server-dev-14
- name: Check out code from GitHub
uses: actions/checkout@v4.1.2
uses: actions/checkout@v4.1.3
- name: Set up Python ${{ matrix.python-version }}
id: python
uses: actions/setup-python@v5.1.0

@@ -1035,7 +1067,7 @@ jobs:
2>&1 | tee pytest-${{ matrix.python-version }}-${postgresql}.txt
- name: Upload pytest output
if: success() || failure() && steps.pytest-partial.conclusion == 'failure'
uses: actions/upload-artifact@v4.3.1
uses: actions/upload-artifact@v4.3.3
with:
name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{
steps.pytest-partial.outputs.postgresql }}

@@ -1043,7 +1075,7 @@ jobs:
overwrite: true
- name: Upload coverage artifact
if: needs.info.outputs.skip_coverage != 'true'
uses: actions/upload-artifact@v4.3.1
uses: actions/upload-artifact@v4.3.3
with:
name: coverage-${{ matrix.python-version }}-${{
steps.pytest-partial.outputs.postgresql }}

@@ -1053,39 +1085,160 @@ jobs:
run: |
./script/check_dirty

coverage:
name: Upload test coverage to Codecov
coverage-full:
name: Upload test coverage to Codecov (full suite)
if: needs.info.outputs.skip_coverage != 'true'
runs-on: ubuntu-22.04
needs:
- info
- pytest
- pytest-full
- pytest-postgres
- pytest-mariadb
timeout-minutes: 10
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.1.2
uses: actions/checkout@v4.1.3
- name: Download all coverage artifacts
uses: actions/download-artifact@v4.1.4
uses: actions/download-artifact@v4.1.6
with:
pattern: coverage-*
- name: Upload coverage to Codecov (full coverage)
- name: Upload coverage to Codecov
if: needs.info.outputs.test_full_suite == 'true'
uses: Wandalen/wretry.action@v2.1.0
uses: codecov/codecov-action@v4.3.0
with:
action: codecov/codecov-action@v3.1.3
with: |
fail_ci_if_error: true
flags: full-suite
token: ${{ env.CODECOV_TOKEN }}
attempt_limit: 5
attempt_delay: 30000
- name: Upload coverage to Codecov (partial coverage)
fail_ci_if_error: true
flags: full-suite
token: ${{ secrets.CODECOV_TOKEN }}

pytest-partial:
runs-on: ubuntu-22.04
if: |
(github.event_name != 'push' || github.event.repository.full_name == 'home-assistant/core')
&& github.event.inputs.lint-only != 'true'
&& github.event.inputs.pylint-only != 'true'
&& github.event.inputs.mypy-only != 'true'
&& needs.info.outputs.tests_glob
&& needs.info.outputs.test_full_suite == 'false'
needs:
- info
- base
- gen-requirements-all
- hassfest
- lint-other
- lint-ruff
- mypy
strategy:
fail-fast: false
matrix:
group: ${{ fromJson(needs.info.outputs.test_groups) }}
python-version: ${{ fromJson(needs.info.outputs.python_versions) }}
name: >-
Run tests Python ${{ matrix.python-version }} (${{ matrix.group }})
steps:
- name: Install additional OS dependencies
run: |
sudo rm /etc/apt/sources.list.d/microsoft-prod.list
sudo apt-get update
sudo apt-get -y install \
bluez \
ffmpeg \
libgammu-dev
- name: Check out code from GitHub
uses: actions/checkout@v4.1.3
- name: Set up Python ${{ matrix.python-version }}
id: python
uses: actions/setup-python@v5.1.0
with:
python-version: ${{ matrix.python-version }}
check-latest: true
- name: Restore full Python ${{ matrix.python-version }} virtual environment
id: cache-venv
uses: actions/cache/restore@v4.0.2
with:
path: venv
fail-on-cache-miss: true
key: ${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
needs.info.outputs.python_cache_key }}
- name: Register Python problem matcher
run: |
echo "::add-matcher::.github/workflows/matchers/python.json"
- name: Register pytest slow test problem matcher
run: |
echo "::add-matcher::.github/workflows/matchers/pytest-slow.json"
- name: Compile English translations
run: |
. venv/bin/activate
python3 -m script.translations develop --all
- name: Run pytest
timeout-minutes: 10
id: pytest-partial
shell: bash
env:
PYTHONDONTWRITEBYTECODE: 1
run: |
. venv/bin/activate
python --version
set -o pipefail

if [[ ! -f "tests/components/${{ matrix.group }}/__init__.py" ]]; then
echo "::error:: missing file tests/components/${{ matrix.group }}/__init__.py"
exit 1
fi

cov_params=()
if [[ "${{ needs.info.outputs.skip_coverage }}" != "true" ]]; then
cov_params+=(--cov="homeassistant.components.${{ matrix.group }}")
cov_params+=(--cov-report=xml)
cov_params+=(--cov-report=term-missing)
fi

python3 -b -X dev -m pytest \
-qq \
--timeout=9 \
-n auto \
${cov_params[@]} \
-o console_output_style=count \
--durations=0 \
--durations-min=1 \
-p no:sugar \
tests/components/${{ matrix.group }} \
2>&1 | tee pytest-${{ matrix.python-version }}-${{ matrix.group }}.txt
- name: Upload pytest output
if: success() || failure() && steps.pytest-partial.conclusion == 'failure'
uses: actions/upload-artifact@v4.3.3
with:
name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ matrix.group }}
path: pytest-*.txt
overwrite: true
- name: Upload coverage artifact
if: needs.info.outputs.skip_coverage != 'true'
uses: actions/upload-artifact@v4.3.3
with:
name: coverage-${{ matrix.python-version }}-${{ matrix.group }}
path: coverage.xml
overwrite: true
- name: Check dirty
run: |
./script/check_dirty

coverage-partial:
name: Upload test coverage to Codecov (partial suite)
if: needs.info.outputs.skip_coverage != 'true'
runs-on: ubuntu-22.04
needs:
- info
- pytest-partial
timeout-minutes: 10
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.1.3
- name: Download all coverage artifacts
uses: actions/download-artifact@v4.1.6
with:
pattern: coverage-*
- name: Upload coverage to Codecov
if: needs.info.outputs.test_full_suite == 'false'
uses: Wandalen/wretry.action@v2.1.0
uses: codecov/codecov-action@v4.3.0
with:
action: codecov/codecov-action@v3.1.3
with: |
fail_ci_if_error: true
token: ${{ env.CODECOV_TOKEN }}
attempt_limit: 5
attempt_delay: 30000
fail_ci_if_error: true
token: ${{ secrets.CODECOV_TOKEN }}
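
The new prepare-pytest-full job above writes one line of test paths per matrix group into pytest_buckets.txt (via script.split_tests), and each pytest-full job then picks its own line with sed before handing those paths to pytest. A minimal sketch of that selection with a hypothetical three-line bucket file (only the sed idiom comes from the workflow; the bucket contents and the group value stand in for ${{ matrix.group }}): in GNU sed an address range whose end (1) is lower than its start collapses to the single starting line, so "N,1p" prints just line N.

  $ cat pytest_buckets.txt      # hypothetical contents, one bucket per line
  tests/components/demo tests/components/kitchen_sink
  tests/components/recorder
  tests/components/zha tests/components/zwave_js
  $ group=2                     # stands in for ${{ matrix.group }}
  $ sed -n "${group},1p" pytest_buckets.txt
  tests/components/recorder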

@@ -21,14 +21,14 @@ jobs:

steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.1.2
uses: actions/checkout@v4.1.3

- name: Initialize CodeQL
uses: github/codeql-action/init@v3.24.9
uses: github/codeql-action/init@v3.25.2
with:
languages: python

- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v3.24.9
uses: github/codeql-action/analyze@v3.25.2
with:
category: "/language:python"

@@ -19,7 +19,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout the repository
uses: actions/checkout@v4.1.2
uses: actions/checkout@v4.1.3

- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v5.1.0

@@ -14,6 +14,10 @@ on:
- "homeassistant/package_constraints.txt"
- "requirements_all.txt"
- "requirements.txt"
- "script/gen_requirements_all.py"

env:
DEFAULT_PYTHON: "3.12"

concurrency:
group: ${{ github.workflow }}-${{ github.ref_name}}

@@ -28,7 +32,22 @@ jobs:
architectures: ${{ steps.info.outputs.architectures }}
steps:
- name: Checkout the repository
uses: actions/checkout@v4.1.2
uses: actions/checkout@v4.1.3

- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
uses: actions/setup-python@v5.1.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}
check-latest: true

- name: Create Python virtual environment
run: |
python -m venv venv
. venv/bin/activate
python --version
pip install "$(grep '^uv' < requirements_test.txt)"
uv pip install -r requirements.txt

- name: Get information
id: info

@@ -63,19 +82,30 @@ jobs:
) > .env_file

- name: Upload env_file
uses: actions/upload-artifact@v4.3.1
uses: actions/upload-artifact@v4.3.3
with:
name: env_file
path: ./.env_file
overwrite: true

- name: Upload requirements_diff
uses: actions/upload-artifact@v4.3.1
uses: actions/upload-artifact@v4.3.3
with:
name: requirements_diff
path: ./requirements_diff.txt
overwrite: true

- name: Generate requirements
run: |
. venv/bin/activate
python -m script.gen_requirements_all ci

- name: Upload requirements_all_wheels
uses: actions/upload-artifact@v4.3.3
with:
name: requirements_all_wheels
path: ./requirements_all_wheels_*.txt

core:
name: Build Core wheels ${{ matrix.abi }} for ${{ matrix.arch }} (musllinux_1_2)
if: github.repository_owner == 'home-assistant'

@@ -88,15 +118,15 @@ jobs:
arch: ${{ fromJson(needs.init.outputs.architectures) }}
steps:
- name: Checkout the repository
uses: actions/checkout@v4.1.2
uses: actions/checkout@v4.1.3

- name: Download env_file
uses: actions/download-artifact@v4.1.4
uses: actions/download-artifact@v4.1.6
with:
name: env_file

- name: Download requirements_diff
uses: actions/download-artifact@v4.1.4
uses: actions/download-artifact@v4.1.6
with:
name: requirements_diff

@@ -126,57 +156,30 @@ jobs:
arch: ${{ fromJson(needs.init.outputs.architectures) }}
steps:
- name: Checkout the repository
uses: actions/checkout@v4.1.2
uses: actions/checkout@v4.1.3

- name: Download env_file
uses: actions/download-artifact@v4.1.4
uses: actions/download-artifact@v4.1.6
with:
name: env_file

- name: Download requirements_diff
uses: actions/download-artifact@v4.1.4
uses: actions/download-artifact@v4.1.6
with:
name: requirements_diff

- name: (Un)comment packages
run: |
requirement_files="requirements_all.txt requirements_diff.txt"
for requirement_file in ${requirement_files}; do
sed -i "s|# fritzconnection|fritzconnection|g" ${requirement_file}
sed -i "s|# pyuserinput|pyuserinput|g" ${requirement_file}
sed -i "s|# evdev|evdev|g" ${requirement_file}
sed -i "s|# pycups|pycups|g" ${requirement_file}
sed -i "s|# homekit|homekit|g" ${requirement_file}
sed -i "s|# decora-wifi|decora-wifi|g" ${requirement_file}
sed -i "s|# python-gammu|python-gammu|g" ${requirement_file}

# Some packages are not buildable on armhf anymore
if [ "${{ matrix.arch }}" = "armhf" ]; then

# Pandas has issues building on armhf, it is expected they
# will drop the platform in the near future (they consider it
# "flimsy" on 386). The following packages depend on pandas,
# so we comment them out.
sed -i "s|env-canada|# env-canada|g" ${requirement_file}
sed -i "s|noaa-coops|# noaa-coops|g" ${requirement_file}
sed -i "s|pyezviz|# pyezviz|g" ${requirement_file}
sed -i "s|pykrakenapi|# pykrakenapi|g" ${requirement_file}
fi

# Some speedups are only for 64-bit
if [ "${{ matrix.arch }}" = "amd64" ] || [ "${{ matrix.arch }}" = "aarch64" ]; then
sed -i "s|aiohttp-zlib-ng|aiohttp-zlib-ng\[isal\]|g" ${requirement_file}
fi

done
- name: Download requirements_all_wheels
uses: actions/download-artifact@v4.1.6
with:
name: requirements_all_wheels

- name: Split requirements all
run: |
# We split requirements all into two different files.
# We split requirements all into multiple files.
# This is to prevent the build from running out of memory when
# resolving packages on 32-bits systems (like armhf, armv7).

split -l $(expr $(expr $(cat requirements_all.txt | wc -l) + 1) / 3) requirements_all.txt requirements_all.txt
split -l $(expr $(expr $(cat requirements_all.txt | wc -l) + 1) / 3) requirements_all_wheels_${{ matrix.arch }}.txt requirements_all.txt

- name: Create requirements for cython<3
run: |
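
The "Split requirements all" step above cuts the per-architecture requirements_all_wheels_${{ matrix.arch }}.txt into roughly equal thirds so that package resolution does not run out of memory on 32-bit builds. A minimal sketch of what that invocation does, assuming a hypothetical 299-line requirements_all_wheels_armv7.txt (the chunk-size arithmetic and the requirements_all.txt prefix are taken from the step; the file name and line count are assumed): split -l N INPUT PREFIX writes the chunks as PREFIXaa, PREFIXab, and so on.

  $ wc -l < requirements_all_wheels_armv7.txt   # assumed input size
  299
  $ split -l $(expr $(expr $(cat requirements_all_wheels_armv7.txt | wc -l) + 1) / 3) \
      requirements_all_wheels_armv7.txt requirements_all.txt
  $ wc -l requirements_all.txta*
   100 requirements_all.txtaa
   100 requirements_all.txtab
    99 requirements_all.txtac
   299 total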
|
@ -132,3 +132,6 @@ tmp_cache
|
|||
|
||||
# python-language-server / Rope
|
||||
.ropeproject
|
||||
|
||||
# Will be created from script/split_tests.py
|
||||
pytest_buckets.txt
|
|
@ -1,12 +1,12 @@
|
|||
repos:
|
||||
- repo: https://github.com/astral-sh/ruff-pre-commit
|
||||
rev: v0.3.4
|
||||
rev: v0.4.1
|
||||
hooks:
|
||||
- id: ruff
|
||||
args:
|
||||
- --fix
|
||||
- id: ruff-format
|
||||
files: ^((homeassistant|pylint|script|tests)/.+)?[^/]+\.py$
|
||||
files: ^((homeassistant|pylint|script|tests)/.+)?[^/]+\.(py|pyi)$
|
||||
- repo: https://github.com/codespell-project/codespell
|
||||
rev: v2.2.6
|
||||
hooks:
|
||||
|
@ -15,7 +15,7 @@ repos:
|
|||
- --ignore-words-list=additionals,alle,alot,astroid,bund,caf,convencional,currenty,datas,farenheit,falsy,fo,frequence,haa,hass,iif,incomfort,ines,ist,nam,nd,pres,pullrequests,resset,rime,ser,serie,te,technik,ue,unsecure,vor,withing,zar
|
||||
- --skip="./.*,*.csv,*.json,*.ambr"
|
||||
- --quiet-level=2
|
||||
exclude_types: [csv, json]
|
||||
exclude_types: [csv, json, html]
|
||||
exclude: ^tests/fixtures/|homeassistant/generated/|tests/components/.*/snapshots/
|
||||
- repo: https://github.com/pre-commit/pre-commit-hooks
|
||||
rev: v4.4.0
|
||||
|
@ -63,7 +63,7 @@ repos:
|
|||
language: script
|
||||
types: [python]
|
||||
require_serial: true
|
||||
files: ^(homeassistant|pylint)/.+\.py$
|
||||
files: ^(homeassistant|pylint)/.+\.(py|pyi)$
|
||||
- id: pylint
|
||||
name: pylint
|
||||
entry: script/run-in-env.sh pylint -j 0 --ignore-missing-annotations=y
|
||||
|
@ -83,7 +83,7 @@ repos:
|
|||
pass_filenames: false
|
||||
language: script
|
||||
types: [text]
|
||||
files: ^(homeassistant/.+/(icons|manifest|strings)\.json|homeassistant/brands/.*\.json|\.coveragerc|homeassistant/.+/services\.yaml|script/hassfest/(?!metadata|mypy_config).+\.py)$
|
||||
files: ^(homeassistant/.+/(icons|manifest|strings)\.json|homeassistant/brands/.*\.json|\.coveragerc|homeassistant/.+/services\.yaml|script/hassfest/(?!metadata|mypy_config).+\.py|requirements_test.txt)$
|
||||
- id: hassfest-metadata
|
||||
name: hassfest-metadata
|
||||
entry: script/run-in-env.sh python3 -m script.hassfest -p metadata
|
||||
|
|
|
@ -66,6 +66,7 @@ homeassistant.components.alpha_vantage.*
|
|||
homeassistant.components.amazon_polly.*
|
||||
homeassistant.components.amberelectric.*
|
||||
homeassistant.components.ambiclimate.*
|
||||
homeassistant.components.ambient_network.*
|
||||
homeassistant.components.ambient_station.*
|
||||
homeassistant.components.amcrest.*
|
||||
homeassistant.components.ampio.*
|
||||
|
@ -166,10 +167,12 @@ homeassistant.components.electric_kiwi.*
|
|||
homeassistant.components.elgato.*
|
||||
homeassistant.components.elkm1.*
|
||||
homeassistant.components.emulated_hue.*
|
||||
homeassistant.components.energenie_power_sockets.*
|
||||
homeassistant.components.energy.*
|
||||
homeassistant.components.energyzero.*
|
||||
homeassistant.components.enigma2.*
|
||||
homeassistant.components.enphase_envoy.*
|
||||
homeassistant.components.eq3btsmart.*
|
||||
homeassistant.components.esphome.*
|
||||
homeassistant.components.event.*
|
||||
homeassistant.components.evil_genius_labs.*
|
||||
|
@ -361,6 +364,7 @@ homeassistant.components.rest_command.*
|
|||
homeassistant.components.rfxtrx.*
|
||||
homeassistant.components.rhasspy.*
|
||||
homeassistant.components.ridwell.*
|
||||
homeassistant.components.ring.*
|
||||
homeassistant.components.rituals_perfume_genie.*
|
||||
homeassistant.components.roku.*
|
||||
homeassistant.components.romy.*
|
||||
|
|
CODEOWNERS
@@ -5,13 +5,30 @@
# https://docs.github.com/en/repositories/managing-your-repositorys-settings-and-features/customizing-your-repository/about-code-owners

# Home Assistant Core
setup.cfg @home-assistant/core
.core_files.yaml @home-assistant/core
.git-blame-ignore-revs @home-assistant/core
.gitattributes @home-assistant/core
.gitignore @home-assistant/core
.hadolint.yaml @home-assistant/core
.pre-commit-config.yaml @home-assistant/core
.prettierignore @home-assistant/core
.yamllint @home-assistant/core
pyproject.toml @home-assistant/core
requirements_test.txt @home-assistant/core
/.devcontainer/ @home-assistant/core
/.github/ @home-assistant/core
/.vscode/ @home-assistant/core
/homeassistant/*.py @home-assistant/core
/homeassistant/auth/ @home-assistant/core
/homeassistant/backports/ @home-assistant/core
/homeassistant/helpers/ @home-assistant/core
/homeassistant/scripts/ @home-assistant/core
/homeassistant/util/ @home-assistant/core
/pylint/ @home-assistant/core
/script/ @home-assistant/core

# Home Assistant Supervisor
.dockerignore @home-assistant/supervisor
build.json @home-assistant/supervisor
/machine/ @home-assistant/supervisor
/rootfs/ @home-assistant/supervisor
@ -73,6 +90,8 @@ build.json @home-assistant/supervisor
|
|||
/tests/components/amberelectric/ @madpilot
|
||||
/homeassistant/components/ambiclimate/ @danielhiversen
|
||||
/tests/components/ambiclimate/ @danielhiversen
|
||||
/homeassistant/components/ambient_network/ @thomaskistler
|
||||
/tests/components/ambient_network/ @thomaskistler
|
||||
/homeassistant/components/ambient_station/ @bachya
|
||||
/tests/components/ambient_station/ @bachya
|
||||
/homeassistant/components/amcrest/ @flacjacket
|
||||
|
@ -113,6 +132,8 @@ build.json @home-assistant/supervisor
|
|||
/homeassistant/components/arcam_fmj/ @elupus
|
||||
/tests/components/arcam_fmj/ @elupus
|
||||
/homeassistant/components/arris_tg2492lg/ @vanbalken
|
||||
/homeassistant/components/arve/ @ikalnyi
|
||||
/tests/components/arve/ @ikalnyi
|
||||
/homeassistant/components/aseko_pool_live/ @milanmeu
|
||||
/tests/components/aseko_pool_live/ @milanmeu
|
||||
/homeassistant/components/assist_pipeline/ @balloob @synesthesiam
|
||||
|
@ -299,8 +320,8 @@ build.json @home-assistant/supervisor
|
|||
/tests/components/discovergy/ @jpbede
|
||||
/homeassistant/components/dlink/ @tkdrob
|
||||
/tests/components/dlink/ @tkdrob
|
||||
/homeassistant/components/dlna_dmr/ @StevenLooman @chishm
|
||||
/tests/components/dlna_dmr/ @StevenLooman @chishm
|
||||
/homeassistant/components/dlna_dmr/ @chishm
|
||||
/tests/components/dlna_dmr/ @chishm
|
||||
/homeassistant/components/dlna_dms/ @chishm
|
||||
/tests/components/dlna_dms/ @chishm
|
||||
/homeassistant/components/dnsip/ @gjohansson-ST
|
||||
|
@ -361,11 +382,14 @@ build.json @home-assistant/supervisor
|
|||
/tests/components/emulated_hue/ @bdraco @Tho85
|
||||
/homeassistant/components/emulated_kasa/ @kbickar
|
||||
/tests/components/emulated_kasa/ @kbickar
|
||||
/homeassistant/components/energenie_power_sockets/ @gnumpi
|
||||
/tests/components/energenie_power_sockets/ @gnumpi
|
||||
/homeassistant/components/energy/ @home-assistant/core
|
||||
/tests/components/energy/ @home-assistant/core
|
||||
/homeassistant/components/energyzero/ @klaasnicolaas
|
||||
/tests/components/energyzero/ @klaasnicolaas
|
||||
/homeassistant/components/enigma2/ @autinerd
|
||||
/tests/components/enigma2/ @autinerd
|
||||
/homeassistant/components/enocean/ @bdurrer
|
||||
/tests/components/enocean/ @bdurrer
|
||||
/homeassistant/components/enphase_envoy/ @bdraco @cgarwood @dgomes @joostlek @catsmanac
|
||||
|
@ -374,11 +398,14 @@ build.json @home-assistant/supervisor
|
|||
/homeassistant/components/environment_canada/ @gwww @michaeldavie
|
||||
/tests/components/environment_canada/ @gwww @michaeldavie
|
||||
/homeassistant/components/ephember/ @ttroy50
|
||||
/homeassistant/components/epic_games_store/ @hacf-fr @Quentame
|
||||
/tests/components/epic_games_store/ @hacf-fr @Quentame
|
||||
/homeassistant/components/epion/ @lhgravendeel
|
||||
/tests/components/epion/ @lhgravendeel
|
||||
/homeassistant/components/epson/ @pszafer
|
||||
/tests/components/epson/ @pszafer
|
||||
/homeassistant/components/epsonworkforce/ @ThaStealth
|
||||
/homeassistant/components/eq3btsmart/ @eulemitkeule @dbuezas
|
||||
/tests/components/eq3btsmart/ @eulemitkeule @dbuezas
|
||||
/homeassistant/components/escea/ @lazdavila
|
||||
/tests/components/escea/ @lazdavila
|
||||
/homeassistant/components/esphome/ @OttoWinter @jesserockz @kbx81 @bdraco
|
||||
|
@ -435,8 +462,8 @@ build.json @home-assistant/supervisor
|
|||
/homeassistant/components/forked_daapd/ @uvjustin
|
||||
/tests/components/forked_daapd/ @uvjustin
|
||||
/homeassistant/components/fortios/ @kimfrellsen
|
||||
/homeassistant/components/foscam/ @skgsergio @krmarien
|
||||
/tests/components/foscam/ @skgsergio @krmarien
|
||||
/homeassistant/components/foscam/ @krmarien
|
||||
/tests/components/foscam/ @krmarien
|
||||
/homeassistant/components/freebox/ @hacf-fr @Quentame
|
||||
/tests/components/freebox/ @hacf-fr @Quentame
|
||||
/homeassistant/components/freedompro/ @stefano055415
|
||||
|
@ -574,6 +601,8 @@ build.json @home-assistant/supervisor
|
|||
/tests/components/homekit_controller/ @Jc2k @bdraco
|
||||
/homeassistant/components/homematic/ @pvizeli
|
||||
/tests/components/homematic/ @pvizeli
|
||||
/homeassistant/components/homematicip_cloud/ @hahn-th
|
||||
/tests/components/homematicip_cloud/ @hahn-th
|
||||
/homeassistant/components/homewizard/ @DCSBL
|
||||
/tests/components/homewizard/ @DCSBL
|
||||
/homeassistant/components/honeywell/ @rdfurman @mkmer
|
||||
|
@ -661,8 +690,8 @@ build.json @home-assistant/supervisor
|
|||
/homeassistant/components/iqvia/ @bachya
|
||||
/tests/components/iqvia/ @bachya
|
||||
/homeassistant/components/irish_rail_transport/ @ttroy50
|
||||
/homeassistant/components/islamic_prayer_times/ @engrbm87
|
||||
/tests/components/islamic_prayer_times/ @engrbm87
|
||||
/homeassistant/components/islamic_prayer_times/ @engrbm87 @cpfair
|
||||
/tests/components/islamic_prayer_times/ @engrbm87 @cpfair
|
||||
/homeassistant/components/iss/ @DurgNomis-drol
|
||||
/tests/components/iss/ @DurgNomis-drol
|
||||
/homeassistant/components/isy994/ @bdraco @shbatm
|
||||
|
@ -731,7 +760,8 @@ build.json @home-assistant/supervisor
|
|||
/tests/components/leaone/ @bdraco
|
||||
/homeassistant/components/led_ble/ @bdraco
|
||||
/tests/components/led_ble/ @bdraco
|
||||
/homeassistant/components/lg_netcast/ @Drafteed
|
||||
/homeassistant/components/lg_netcast/ @Drafteed @splinter98
|
||||
/tests/components/lg_netcast/ @Drafteed @splinter98
|
||||
/homeassistant/components/lidarr/ @tkdrob
|
||||
/tests/components/lidarr/ @tkdrob
|
||||
/homeassistant/components/light/ @home-assistant/core
|
||||
|
@ -847,8 +877,8 @@ build.json @home-assistant/supervisor
|
|||
/tests/components/motioneye/ @dermotduffy
|
||||
/homeassistant/components/motionmount/ @RJPoelstra
|
||||
/tests/components/motionmount/ @RJPoelstra
|
||||
/homeassistant/components/mqtt/ @emontnemery @jbouwh
|
||||
/tests/components/mqtt/ @emontnemery @jbouwh
|
||||
/homeassistant/components/mqtt/ @emontnemery @jbouwh @bdraco
|
||||
/tests/components/mqtt/ @emontnemery @jbouwh @bdraco
|
||||
/homeassistant/components/msteams/ @peroyvind
|
||||
/homeassistant/components/mullvad/ @meichthys
|
||||
/tests/components/mullvad/ @meichthys
|
||||
|
@ -1001,8 +1031,8 @@ build.json @home-assistant/supervisor
|
|||
/tests/components/persistent_notification/ @home-assistant/core
|
||||
/homeassistant/components/philips_js/ @elupus
|
||||
/tests/components/philips_js/ @elupus
|
||||
/homeassistant/components/pi_hole/ @johnluetke @shenxn
|
||||
/tests/components/pi_hole/ @johnluetke @shenxn
|
||||
/homeassistant/components/pi_hole/ @shenxn
|
||||
/tests/components/pi_hole/ @shenxn
|
||||
/homeassistant/components/picnic/ @corneyl
|
||||
/tests/components/picnic/ @corneyl
|
||||
/homeassistant/components/pilight/ @trekky12
|
||||
|
@ -1056,8 +1086,8 @@ build.json @home-assistant/supervisor
|
|||
/tests/components/pvpc_hourly_pricing/ @azogue
|
||||
/homeassistant/components/qbittorrent/ @geoffreylagaisse @finder39
|
||||
/tests/components/qbittorrent/ @geoffreylagaisse @finder39
|
||||
/homeassistant/components/qingping/ @bdraco @skgsergio
|
||||
/tests/components/qingping/ @bdraco @skgsergio
|
||||
/homeassistant/components/qingping/ @bdraco
|
||||
/tests/components/qingping/ @bdraco
|
||||
/homeassistant/components/qld_bushfire/ @exxamalte
|
||||
/tests/components/qld_bushfire/ @exxamalte
|
||||
/homeassistant/components/qnap/ @disforw
|
||||
|
@ -1156,11 +1186,13 @@ build.json @home-assistant/supervisor
|
|||
/tests/components/ruuvitag_ble/ @akx
|
||||
/homeassistant/components/rympro/ @OnFreund @elad-bar @maorcc
|
||||
/tests/components/rympro/ @OnFreund @elad-bar @maorcc
|
||||
/homeassistant/components/sabnzbd/ @shaiu
|
||||
/tests/components/sabnzbd/ @shaiu
|
||||
/homeassistant/components/sabnzbd/ @shaiu @jpbede
|
||||
/tests/components/sabnzbd/ @shaiu @jpbede
|
||||
/homeassistant/components/saj/ @fredericvl
|
||||
/homeassistant/components/samsungtv/ @chemelli74 @epenet
|
||||
/tests/components/samsungtv/ @chemelli74 @epenet
|
||||
/homeassistant/components/sanix/ @tomaszsluszniak
|
||||
/tests/components/sanix/ @tomaszsluszniak
|
||||
/homeassistant/components/scene/ @home-assistant/core
|
||||
/tests/components/scene/ @home-assistant/core
|
||||
/homeassistant/components/schedule/ @home-assistant/core
|
||||
|
@ -1247,14 +1279,15 @@ build.json @home-assistant/supervisor
|
|||
/homeassistant/components/smhi/ @gjohansson-ST
|
||||
/tests/components/smhi/ @gjohansson-ST
|
||||
/homeassistant/components/sms/ @ocalvo
|
||||
/tests/components/sms/ @ocalvo
|
||||
/homeassistant/components/snapcast/ @luar123
|
||||
/tests/components/snapcast/ @luar123
|
||||
/homeassistant/components/snmp/ @nmaggioni
|
||||
/tests/components/snmp/ @nmaggioni
|
||||
/homeassistant/components/snooz/ @AustinBrunkhorst
|
||||
/tests/components/snooz/ @AustinBrunkhorst
|
||||
/homeassistant/components/solaredge/ @frenck
|
||||
/tests/components/solaredge/ @frenck
|
||||
/homeassistant/components/solaredge/ @frenck @bdraco
|
||||
/tests/components/solaredge/ @frenck @bdraco
|
||||
/homeassistant/components/solaredge_local/ @drobtravels @scheric
|
||||
/homeassistant/components/solarlog/ @Ernst79
|
||||
/tests/components/solarlog/ @Ernst79
|
||||
|
@ -1266,8 +1299,8 @@ build.json @home-assistant/supervisor
|
|||
/tests/components/sonarr/ @ctalkington
|
||||
/homeassistant/components/songpal/ @rytilahti @shenxn
|
||||
/tests/components/songpal/ @rytilahti @shenxn
|
||||
/homeassistant/components/sonos/ @jjlawren
|
||||
/tests/components/sonos/ @jjlawren
|
||||
/homeassistant/components/sonos/ @jjlawren @peterager
|
||||
/tests/components/sonos/ @jjlawren @peterager
|
||||
/homeassistant/components/soundtouch/ @kroimon
|
||||
/tests/components/soundtouch/ @kroimon
|
||||
/homeassistant/components/spaceapi/ @fabaff
|
||||
|
@ -1551,8 +1584,8 @@ build.json @home-assistant/supervisor
|
|||
/tests/components/wiz/ @sbidy
|
||||
/homeassistant/components/wled/ @frenck
|
||||
/tests/components/wled/ @frenck
|
||||
/homeassistant/components/wolflink/ @adamkrol93
|
||||
/tests/components/wolflink/ @adamkrol93
|
||||
/homeassistant/components/wolflink/ @adamkrol93 @mtielen
|
||||
/tests/components/wolflink/ @adamkrol93 @mtielen
|
||||
/homeassistant/components/workday/ @fabaff @gjohansson-ST
|
||||
/tests/components/workday/ @fabaff @gjohansson-ST
|
||||
/homeassistant/components/worldclock/ @fabaff
|
||||
|
|
|
@@ -12,7 +12,7 @@ ENV \
ARG QEMU_CPU

# Install uv
RUN pip3 install uv==0.1.24
RUN pip3 install uv==0.1.35

WORKDIR /usr/src

@@ -30,14 +30,10 @@ RUN \
        uv pip install homeassistant/home_assistant_*.whl; \
    fi \
    && if [ "${BUILD_ARCH}" = "i386" ]; then \
        LD_PRELOAD="/usr/local/lib/libjemalloc.so.2" \
        MALLOC_CONF="background_thread:true,metadata_thp:auto,dirty_decay_ms:20000,muzzy_decay_ms:20000" \
        linux32 uv pip install \
            --no-build \
            -r homeassistant/requirements_all.txt; \
    else \
        LD_PRELOAD="/usr/local/lib/libjemalloc.so.2" \
        MALLOC_CONF="background_thread:true,metadata_thp:auto,dirty_decay_ms:20000,muzzy_decay_ms:20000" \
        uv pip install \
            --no-build \
            -r homeassistant/requirements_all.txt; \
@@ -22,6 +22,7 @@ RUN \
        libavcodec-dev \
        libavdevice-dev \
        libavutil-dev \
        libgammu-dev \
        libswscale-dev \
        libswresample-dev \
        libavfilter-dev \
@@ -146,9 +146,7 @@ def get_arguments() -> argparse.Namespace:
        help="Skips validation of operating system",
    )

    arguments = parser.parse_args()

    return arguments
    return parser.parse_args()


def check_threads() -> None:
@@ -28,6 +28,7 @@ from .const import ACCESS_TOKEN_EXPIRATION, GROUP_ID_ADMIN, REFRESH_TOKEN_EXPIRA
from .mfa_modules import MultiFactorAuthModule, auth_mfa_module_from_config
from .models import AuthFlowResult
from .providers import AuthProvider, LoginFlow, auth_provider_from_config
from .session import SessionManager

EVENT_USER_ADDED = "user_added"
EVENT_USER_UPDATED = "user_updated"
@@ -85,7 +86,7 @@ async def auth_manager_from_config(
        module_hash[module.id] = module

    manager = AuthManager(hass, store, provider_hash, module_hash)
    manager.async_setup()
    await manager.async_setup()
    return manager


@@ -180,9 +181,9 @@ class AuthManager:
        self._remove_expired_job = HassJob(
            self._async_remove_expired_refresh_tokens, job_type=HassJobType.Callback
        )
        self.session = SessionManager(hass, self)

    @callback
    def async_setup(self) -> None:
    async def async_setup(self) -> None:
        """Set up the auth manager."""
        hass = self.hass
        hass.async_add_shutdown_job(
@@ -191,6 +192,7 @@ class AuthManager:
            )
        )
        self._async_track_next_refresh_token_expiration()
        await self.session.async_setup()

    @property
    def auth_providers(self) -> list[AuthProvider]:
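The hunks above turn AuthManager.async_setup from a synchronous callback into a coroutine so that SessionManager.async_setup can be awaited during startup, and the auth_manager_from_config factory awaits it in turn. A minimal sketch of the same pattern, using plain asyncio and hypothetical names rather than Home Assistant's job helpers:

import asyncio


class SubManager:
    """Stand-in for SessionManager; loads persisted state on setup."""

    async def async_setup(self) -> None:
        await asyncio.sleep(0)  # e.g. an async storage load


class Manager:
    """Stand-in for AuthManager."""

    def __init__(self) -> None:
        self.session = SubManager()

    async def async_setup(self) -> None:
        # Setup is now a coroutine, so nested async setup can simply be awaited.
        await self.session.async_setup()


async def manager_from_config() -> Manager:
    manager = Manager()
    await manager.async_setup()  # was previously a plain callback invocation
    return manager


asyncio.run(manager_from_config())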
@@ -78,7 +78,7 @@ class _PyJWTWithVerify(PyJWT):
        key: str,
        algorithms: list[str],
        issuer: str | None = None,
        leeway: int | float | timedelta = 0,
        leeway: float | timedelta = 0,
        options: dict[str, Any] | None = None,
    ) -> dict[str, Any]:
        """Verify a JWT's signature and claims."""
@@ -3,8 +3,9 @@
from __future__ import annotations

from datetime import datetime, timedelta
from functools import cached_property
import secrets
from typing import TYPE_CHECKING, Any, NamedTuple
from typing import Any, NamedTuple
import uuid

import attr
@@ -18,12 +19,6 @@ from homeassistant.util import dt as dt_util
from . import permissions as perm_mdl
from .const import GROUP_ID_ADMIN

if TYPE_CHECKING:
    from functools import cached_property
else:
    from homeassistant.backports.functools import cached_property


TOKEN_TYPE_NORMAL = "normal"
TOKEN_TYPE_SYSTEM = "system"
TOKEN_TYPE_LONG_LIVED_ACCESS_TOKEN = "long_lived_access_token"
@@ -91,11 +86,7 @@ class User:
    def invalidate_cache(self) -> None:
        """Invalidate permission and is_admin cache."""
        for attr_to_invalidate in ("permissions", "is_admin"):
            # try is must more efficient than suppress
            try:  # noqa: SIM105
                delattr(self, attr_to_invalidate)
            except AttributeError:
                pass
            self.__dict__.pop(attr_to_invalidate, None)


@attr.s(slots=True)
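With functools.cached_property the cached value lives in the instance __dict__, so __dict__.pop(name, None) drops it without the try/delattr/except dance the old code needed. A small self-contained sketch of the invalidation technique:

from functools import cached_property


class Account:
    @cached_property
    def is_admin(self) -> bool:
        print("computing")
        return True

    def invalidate_cache(self) -> None:
        # A missing key is simply ignored, so no AttributeError handling is needed.
        self.__dict__.pop("is_admin", None)


acct = Account()
assert acct.is_admin   # prints "computing", result cached in acct.__dict__
assert acct.is_admin   # served from the cache, no recompute
acct.invalidate_cache()
assert acct.is_admin   # prints "computing" again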
@@ -3,7 +3,6 @@
from __future__ import annotations

from collections.abc import Callable
from typing import Any

import voluptuous as vol

@@ -64,7 +63,7 @@ class PolicyPermissions(AbstractPermissions):
        """Return a function that can test entity access."""
        return compile_entities(self._policy.get(CAT_ENTITIES), self._perm_lookup)

    def __eq__(self, other: Any) -> bool:
    def __eq__(self, other: object) -> bool:
        """Equals check."""
        return isinstance(other, PolicyPermissions) and other._policy == self._policy

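Annotating __eq__ with other: object matches the signature of object.__eq__, and the isinstance check then narrows the operand for the type checker. A generic sketch of the convention (returning NotImplemented for foreign types is the usual idiom, whereas the code above simply falls through to False via isinstance):

class Policy:
    def __init__(self, rules: dict[str, bool]) -> None:
        self._rules = rules

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, Policy):
            # Let the other operand handle the comparison, or fall back to identity.
            return NotImplemented
        return self._rules == other._rules


assert Policy({"a": True}) == Policy({"a": True})
assert Policy({"a": True}) != {"a": True}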
@@ -2,7 +2,7 @@

from __future__ import annotations

from typing import Final
from typing import Any, Final

from homeassistant.const import (
    EVENT_COMPONENT_LOADED,
@@ -21,10 +21,11 @@ from homeassistant.helpers.area_registry import EVENT_AREA_REGISTRY_UPDATED
from homeassistant.helpers.device_registry import EVENT_DEVICE_REGISTRY_UPDATED
from homeassistant.helpers.entity_registry import EVENT_ENTITY_REGISTRY_UPDATED
from homeassistant.helpers.issue_registry import EVENT_REPAIRS_ISSUE_REGISTRY_UPDATED
from homeassistant.util.event_type import EventType

# These are events that do not contain any sensitive data
# Except for state_changed, which is handled accordingly.
SUBSCRIBE_ALLOWLIST: Final[set[str]] = {
SUBSCRIBE_ALLOWLIST: Final[set[EventType[Any] | str]] = {
    EVENT_AREA_REGISTRY_UPDATED,
    EVENT_COMPONENT_LOADED,
    EVENT_CORE_CONFIG_UPDATE,
@@ -0,0 +1,205 @@
"""Session auth module."""

from __future__ import annotations

from datetime import datetime, timedelta
import secrets
from typing import TYPE_CHECKING, Final, TypedDict

from aiohttp.web import Request
from aiohttp_session import Session, get_session, new_session
from cryptography.fernet import Fernet

from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback
from homeassistant.helpers.event import async_call_later
from homeassistant.helpers.storage import Store
from homeassistant.util import dt as dt_util

from .models import RefreshToken

if TYPE_CHECKING:
    from . import AuthManager


TEMP_TIMEOUT = timedelta(minutes=5)
TEMP_TIMEOUT_SECONDS = TEMP_TIMEOUT.total_seconds()

SESSION_ID = "id"
STORAGE_VERSION = 1
STORAGE_KEY = "auth.session"


class StrictConnectionTempSessionData:
    """Data for accessing unauthorized resources for a short period of time."""

    __slots__ = ("cancel_remove", "absolute_expiry")

    def __init__(self, cancel_remove: CALLBACK_TYPE) -> None:
        """Initialize the temp session data."""
        self.cancel_remove: Final[CALLBACK_TYPE] = cancel_remove
        self.absolute_expiry: Final[datetime] = dt_util.utcnow() + TEMP_TIMEOUT


class StoreData(TypedDict):
    """Data to store."""

    unauthorized_sessions: dict[str, str]
    key: str


class SessionManager:
    """Session manager."""

    def __init__(self, hass: HomeAssistant, auth: AuthManager) -> None:
        """Initialize the strict connection manager."""
        self._auth = auth
        self._hass = hass
        self._temp_sessions: dict[str, StrictConnectionTempSessionData] = {}
        self._strict_connection_sessions: dict[str, str] = {}
        self._store = Store[StoreData](
            hass, STORAGE_VERSION, STORAGE_KEY, private=True, atomic_writes=True
        )
        self._key: str | None = None
        self._refresh_token_revoke_callbacks: dict[str, CALLBACK_TYPE] = {}

    @property
    def key(self) -> str:
        """Return the encryption key."""
        if self._key is None:
            self._key = Fernet.generate_key().decode()
            self._async_schedule_save()
        return self._key

    async def async_validate_request_for_strict_connection_session(
        self,
        request: Request,
    ) -> bool:
        """Check if a request has a valid strict connection session."""
        session = await get_session(request)
        if session.new or session.empty:
            return False
        result = self.async_validate_strict_connection_session(session)
        if result is False:
            session.invalidate()
        return result

    @callback
    def async_validate_strict_connection_session(
        self,
        session: Session,
    ) -> bool:
        """Validate a strict connection session."""
        if not (session_id := session.get(SESSION_ID)):
            return False

        if token_id := self._strict_connection_sessions.get(session_id):
            if self._auth.async_get_refresh_token(token_id):
                return True
            # refresh token is invalid, delete entry
            self._strict_connection_sessions.pop(session_id)
            self._async_schedule_save()

        if data := self._temp_sessions.get(session_id):
            if dt_util.utcnow() <= data.absolute_expiry:
                return True
            # session expired, delete entry
            self._temp_sessions.pop(session_id).cancel_remove()

        return False

    @callback
    def _async_register_revoke_token_callback(self, refresh_token_id: str) -> None:
        """Register a callback to revoke all sessions for a refresh token."""
        if refresh_token_id in self._refresh_token_revoke_callbacks:
            return

        @callback
        def async_invalidate_auth_sessions() -> None:
            """Invalidate all sessions for a refresh token."""
            self._strict_connection_sessions = {
                session_id: token_id
                for session_id, token_id in self._strict_connection_sessions.items()
                if token_id != refresh_token_id
            }
            self._async_schedule_save()

        self._refresh_token_revoke_callbacks[refresh_token_id] = (
            self._auth.async_register_revoke_token_callback(
                refresh_token_id, async_invalidate_auth_sessions
            )
        )

    async def async_create_session(
        self,
        request: Request,
        refresh_token: RefreshToken,
    ) -> None:
        """Create new session for given refresh token.

        Caller needs to make sure that the refresh token is valid.
        By creating a session, we are implicitly revoking all other
        sessions for the given refresh token as there is one refresh
        token per device/user case.
        """
        self._strict_connection_sessions = {
            session_id: token_id
            for session_id, token_id in self._strict_connection_sessions.items()
            if token_id != refresh_token.id
        }

        self._async_register_revoke_token_callback(refresh_token.id)
        session_id = await self._async_create_new_session(request)
        self._strict_connection_sessions[session_id] = refresh_token.id
        self._async_schedule_save()

    async def async_create_temp_unauthorized_session(self, request: Request) -> None:
        """Create a temporary unauthorized session."""
        session_id = await self._async_create_new_session(
            request, max_age=int(TEMP_TIMEOUT_SECONDS)
        )

        @callback
        def remove(_: datetime) -> None:
            self._temp_sessions.pop(session_id, None)

        self._temp_sessions[session_id] = StrictConnectionTempSessionData(
            async_call_later(self._hass, TEMP_TIMEOUT_SECONDS, remove)
        )

    async def _async_create_new_session(
        self,
        request: Request,
        *,
        max_age: int | None = None,
    ) -> str:
        session_id = secrets.token_hex(64)

        session = await new_session(request)
        session[SESSION_ID] = session_id
        if max_age is not None:
            session.max_age = max_age
        return session_id

    @callback
    def _async_schedule_save(self, delay: float = 1) -> None:
        """Save sessions."""
        self._store.async_delay_save(self._data_to_save, delay)

    @callback
    def _data_to_save(self) -> StoreData:
        """Return the data to store."""
        return StoreData(
            unauthorized_sessions=self._strict_connection_sessions,
            key=self.key,
        )

    async def async_setup(self) -> None:
        """Set up session manager."""
        data = await self._store.async_load()
        if data is None:
            return

        self._key = data["key"]
        self._strict_connection_sessions = data["unauthorized_sessions"]
        for token_id in self._strict_connection_sessions.values():
            self._async_register_revoke_token_callback(token_id)
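The new SessionManager keeps two maps: persisted sessions bound to a refresh token and short-lived anonymous sessions that expire after TEMP_TIMEOUT, each removed both by a delayed callback and by an absolute-expiry check on validation. A stripped-down, dependency-free sketch of that temp-session bookkeeping, using plain asyncio instead of async_call_later and hypothetical names:

import asyncio
import secrets
from datetime import datetime, timedelta, timezone

TEMP_TIMEOUT = timedelta(minutes=5)


class TempSessions:
    def __init__(self) -> None:
        self._expiry: dict[str, datetime] = {}

    def create(self) -> str:
        session_id = secrets.token_hex(32)
        self._expiry[session_id] = datetime.now(timezone.utc) + TEMP_TIMEOUT
        # Schedule removal so the map cannot grow without bound.
        asyncio.get_running_loop().call_later(
            TEMP_TIMEOUT.total_seconds(), self._expiry.pop, session_id, None
        )
        return session_id

    def validate(self, session_id: str) -> bool:
        expiry = self._expiry.get(session_id)
        # Also check the absolute expiry, in case the timer has not fired yet.
        return expiry is not None and datetime.now(timezone.utc) <= expiry


async def main() -> None:
    sessions = TempSessions()
    sid = sessions.create()
    assert sessions.validate(sid)
    assert not sessions.validate("unknown")


asyncio.run(main())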
@@ -9,8 +9,21 @@ import it.

from __future__ import annotations

from enum import StrEnum
from enum import StrEnum as _StrEnum
from functools import partial

__all__ = [
    "StrEnum",
]
from homeassistant.helpers.deprecation import (
    DeprecatedAlias,
    all_with_deprecated_constants,
    check_if_deprecated_constant,
    dir_with_deprecated_constants,
)

# StrEnum deprecated as of 2024.5 use enum.StrEnum instead.
_DEPRECATED_StrEnum = DeprecatedAlias(_StrEnum, "enum.StrEnum", "2025.5")

__getattr__ = partial(check_if_deprecated_constant, module_globals=globals())
__dir__ = partial(
    dir_with_deprecated_constants, module_globals_keys=[*globals().keys()]
)
__all__ = all_with_deprecated_constants(globals())
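The backport module now only re-exports the standard-library StrEnum through Home Assistant's deprecation helpers, which hook the module-level __getattr__ added by PEP 562. A generic, self-contained sketch of that technique, using warnings directly instead of the DeprecatedAlias helper:

"""mymodule: keep a deprecated alias importable while warning callers."""

import warnings
from enum import StrEnum as _StrEnum

_DEPRECATED = {"StrEnum": (_StrEnum, "enum.StrEnum")}


def __getattr__(name: str):
    # PEP 562: only called when the attribute is not found the normal way.
    if name in _DEPRECATED:
        value, replacement = _DEPRECATED[name]
        warnings.warn(
            f"{name} is deprecated, use {replacement} instead",
            DeprecationWarning,
            stacklevel=2,
        )
        return value
    raise AttributeError(f"module {__name__!r} has no attribute {name!r}")


def __dir__() -> list[str]:
    return [*globals(), *_DEPRECATED]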
@ -1,79 +1,30 @@
|
|||
"""Functools backports from standard lib."""
|
||||
"""Functools backports from standard lib.
|
||||
|
||||
# This file contains parts of Python's module wrapper
|
||||
# for the _functools C module
|
||||
# to allow utilities written in Python to be added
|
||||
# to the functools module.
|
||||
# Written by Nick Coghlan <ncoghlan at gmail.com>,
|
||||
# Raymond Hettinger <python at rcn.com>,
|
||||
# and Łukasz Langa <lukasz at langa.pl>.
|
||||
# Copyright © 2001-2023 Python Software Foundation; All Rights Reserved
|
||||
This file contained the backport of the cached_property implementation of Python 3.12.
|
||||
|
||||
Since we have dropped support for Python 3.11, we can remove this backport.
|
||||
This file is kept for now to avoid breaking custom components that might
|
||||
import it.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Callable
|
||||
from types import GenericAlias
|
||||
from typing import Any, Generic, Self, TypeVar, overload
|
||||
from functools import cached_property as _cached_property, partial
|
||||
|
||||
_T = TypeVar("_T")
|
||||
from homeassistant.helpers.deprecation import (
|
||||
DeprecatedAlias,
|
||||
all_with_deprecated_constants,
|
||||
check_if_deprecated_constant,
|
||||
dir_with_deprecated_constants,
|
||||
)
|
||||
|
||||
# cached_property deprecated as of 2024.5 use functools.cached_property instead.
|
||||
_DEPRECATED_cached_property = DeprecatedAlias(
|
||||
_cached_property, "functools.cached_property", "2025.5"
|
||||
)
|
||||
|
||||
class cached_property(Generic[_T]):
|
||||
"""Backport of Python 3.12's cached_property.
|
||||
|
||||
Includes https://github.com/python/cpython/pull/101890/files
|
||||
"""
|
||||
|
||||
def __init__(self, func: Callable[[Any], _T]) -> None:
|
||||
"""Initialize."""
|
||||
self.func: Callable[[Any], _T] = func
|
||||
self.attrname: str | None = None
|
||||
self.__doc__ = func.__doc__
|
||||
|
||||
def __set_name__(self, owner: type[Any], name: str) -> None:
|
||||
"""Set name."""
|
||||
if self.attrname is None:
|
||||
self.attrname = name
|
||||
elif name != self.attrname:
|
||||
raise TypeError(
|
||||
"Cannot assign the same cached_property to two different names "
|
||||
f"({self.attrname!r} and {name!r})."
|
||||
)
|
||||
|
||||
@overload
|
||||
def __get__(self, instance: None, owner: type[Any] | None = None) -> Self: ...
|
||||
|
||||
@overload
|
||||
def __get__(self, instance: Any, owner: type[Any] | None = None) -> _T: ...
|
||||
|
||||
def __get__(
|
||||
self, instance: Any | None, owner: type[Any] | None = None
|
||||
) -> _T | Self:
|
||||
"""Get."""
|
||||
if instance is None:
|
||||
return self
|
||||
if self.attrname is None:
|
||||
raise TypeError(
|
||||
"Cannot use cached_property instance without calling __set_name__ on it."
|
||||
)
|
||||
try:
|
||||
cache = instance.__dict__
|
||||
# not all objects have __dict__ (e.g. class defines slots)
|
||||
except AttributeError:
|
||||
msg = (
|
||||
f"No '__dict__' attribute on {type(instance).__name__!r} "
|
||||
f"instance to cache {self.attrname!r} property."
|
||||
)
|
||||
raise TypeError(msg) from None
|
||||
val = self.func(instance)
|
||||
try:
|
||||
cache[self.attrname] = val
|
||||
except TypeError:
|
||||
msg = (
|
||||
f"The '__dict__' attribute on {type(instance).__name__!r} instance "
|
||||
f"does not support item assignment for caching {self.attrname!r} property."
|
||||
)
|
||||
raise TypeError(msg) from None
|
||||
return val
|
||||
|
||||
__class_getitem__ = classmethod(GenericAlias) # type: ignore[var-annotated]
|
||||
__getattr__ = partial(check_if_deprecated_constant, module_globals=globals())
|
||||
__dir__ = partial(
|
||||
dir_with_deprecated_constants, module_globals_keys=[*globals().keys()]
|
||||
)
|
||||
__all__ = all_with_deprecated_constants(globals())
|
||||
|
|
|
@@ -1,9 +1,36 @@
"""Block blocking calls being done in asyncio."""

from contextlib import suppress
from http.client import HTTPConnection
import importlib
import sys
import time
from typing import Any

from .util.async_ import protect_loop
from .helpers.frame import get_current_frame
from .util.loop import protect_loop

_IN_TESTS = "unittest" in sys.modules


def _check_import_call_allowed(mapped_args: dict[str, Any]) -> bool:
    # If the module is already imported, we can ignore it.
    return bool((args := mapped_args.get("args")) and args[0] in sys.modules)


def _check_sleep_call_allowed(mapped_args: dict[str, Any]) -> bool:
    #
    # Avoid extracting the stack unless we need to since it
    # will have to access the linecache which can do blocking
    # I/O and we are trying to avoid blocking calls.
    #
    # frame[0] is us
    # frame[1] is check_loop
    # frame[2] is protected_loop_func
    # frame[3] is the offender
    with suppress(ValueError):
        return get_current_frame(4).f_code.co_filename.endswith("pydevd.py")
    return False


def enable() -> None:
@@ -14,8 +41,20 @@ def enable() -> None:
    )

    # Prevent sleeping in event loop. Non-strict since 2022.02
    time.sleep = protect_loop(time.sleep, strict=False)
    time.sleep = protect_loop(
        time.sleep, strict=False, check_allowed=_check_sleep_call_allowed
    )

    # Currently disabled. pytz doing I/O when getting timezone.
    # Prevent files being opened inside the event loop
    # builtins.open = protect_loop(builtins.open)

    if not _IN_TESTS:
        # unittest uses `importlib.import_module` to do mocking
        # so we cannot protect it if we are running tests
        importlib.import_module = protect_loop(
            importlib.import_module,
            strict_core=False,
            strict=False,
            check_allowed=_check_import_call_allowed,
        )
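protect_loop (now imported from homeassistant.util.loop) wraps a blocking callable and, via check_allowed, lets known-safe calls through, such as time.sleep issued by the pydevd debugger or importing a module that is already in sys.modules. A rough, generic sketch of the wrapping idea under those assumptions, not the actual helper:

import asyncio
import functools
import logging
import time
from collections.abc import Callable
from typing import Any

_LOGGER = logging.getLogger(__name__)


def protect_blocking(
    func: Callable[..., Any],
    check_allowed: Callable[[dict[str, Any]], bool] | None = None,
) -> Callable[..., Any]:
    """Log when func is called from inside a running event loop."""

    @functools.wraps(func)
    def protected(*args: Any, **kwargs: Any) -> Any:
        try:
            asyncio.get_running_loop()
        except RuntimeError:
            return func(*args, **kwargs)  # not in the event loop: nothing to report
        if check_allowed is None or not check_allowed({"args": args, "kwargs": kwargs}):
            _LOGGER.warning(
                "Detected blocking call to %s inside the event loop", func.__name__
            )
        return func(*args, **kwargs)

    return protected


# Example: tolerate zero-length sleeps, warn about everything else.
time.sleep = protect_blocking(
    time.sleep, check_allowed=lambda m: bool(m["args"]) and m["args"][0] == 0
)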
@@ -23,7 +23,14 @@ import cryptography.hazmat.backends.openssl.backend  # noqa: F401
import voluptuous as vol
import yarl

from . import config as conf_util, config_entries, core, loader, requirements
from . import (
    block_async_io,
    config as conf_util,
    config_entries,
    core,
    loader,
    requirements,
)

# Pre-import frontend deps which have no requirements here to avoid
# loading them at run time and blocking the event loop. We do this ahead
@@ -216,6 +223,7 @@ SETUP_ORDER = (
# If they do not exist they will not be loaded
#
PRELOAD_STORAGE = [
    "core.logger",
    "core.network",
    "http.auth",
    "image",
@@ -245,6 +253,9 @@ async def async_setup_hass(
        runtime_config.log_no_color,
    )

    if runtime_config.debug or hass.loop.get_debug():
        hass.config.debug = True

    hass.config.safe_mode = runtime_config.safe_mode
    hass.config.skip_pip = runtime_config.skip_pip
    hass.config.skip_pip_packages = runtime_config.skip_pip_packages
@@ -260,6 +271,8 @@ async def async_setup_hass(
    _LOGGER.info("Config directory: %s", runtime_config.config_dir)

    loader.async_setup(hass)
    block_async_io.enable()

    config_dict = None
    basic_setup_success = False

@@ -306,6 +319,7 @@ async def async_setup_hass(
        hass = core.HomeAssistant(old_config.config_dir)
        if old_logging:
            hass.data[DATA_LOGGING] = old_logging
        hass.config.debug = old_config.debug
        hass.config.skip_pip = old_config.skip_pip
        hass.config.skip_pip_packages = old_config.skip_pip_packages
        hass.config.internal_url = old_config.internal_url
@@ -564,7 +578,7 @@ def async_enable_logging(
            err_log_path, when="midnight", backupCount=log_rotate_days
        )
    else:
        err_handler = logging.handlers.RotatingFileHandler(
        err_handler = _RotatingFileHandlerWithoutShouldRollOver(
            err_log_path, backupCount=1
        )

@@ -588,6 +602,19 @@ def async_enable_logging(
    async_activate_log_queue_handler(hass)


class _RotatingFileHandlerWithoutShouldRollOver(logging.handlers.RotatingFileHandler):
    """RotatingFileHandler that does not check if it should roll over on every log."""

    def shouldRollover(self, record: logging.LogRecord) -> bool:
        """Never roll over.

        The shouldRollover check is expensive because it has to stat
        the log file for every log record. Since we do not set maxBytes
        the result of this check is always False.
        """
        return False


async def async_mount_local_lib_path(config_dir: str) -> str:
    """Add local library to Python Path.

@@ -704,7 +731,7 @@ async def async_setup_multi_components(
    # to wait to be imported, and the sooner we can get the base platforms
    # loaded the sooner we can start loading the rest of the integrations.
    futures = {
        domain: hass.async_create_task(
        domain: hass.async_create_task_internal(
            async_setup_component(hass, domain, config),
            f"setup component {domain}",
            eager_start=True,
|
|||
{
|
||||
"domain": "epson",
|
||||
"name": "Epson",
|
||||
"integrations": ["epson", "epsonworkforce"]
|
||||
}
|
|
@ -1,5 +1,5 @@
|
|||
{
|
||||
"domain": "eq3",
|
||||
"name": "eQ-3",
|
||||
"integrations": ["maxcube"]
|
||||
"integrations": ["maxcube", "eq3btsmart"]
|
||||
}
|
||||
|
|
|
@ -2,14 +2,10 @@
|
|||
|
||||
from __future__ import annotations
|
||||
|
||||
from asyncio import timeout
|
||||
from datetime import timedelta
|
||||
from dataclasses import dataclass
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from accuweather import AccuWeather, ApiError, InvalidApiKeyError, RequestsExceededError
|
||||
from aiohttp import ClientSession
|
||||
from aiohttp.client_exceptions import ClientConnectorError
|
||||
from accuweather import AccuWeather
|
||||
|
||||
from homeassistant.components.sensor import DOMAIN as SENSOR_PLATFORM
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
|
@ -17,43 +13,70 @@ from homeassistant.const import CONF_API_KEY, CONF_NAME, Platform
|
|||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import entity_registry as er
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
|
||||
|
||||
from .const import ATTR_FORECAST, CONF_FORECAST, DOMAIN, MANUFACTURER
|
||||
from .const import DOMAIN, UPDATE_INTERVAL_DAILY_FORECAST, UPDATE_INTERVAL_OBSERVATION
|
||||
from .coordinator import (
|
||||
AccuWeatherDailyForecastDataUpdateCoordinator,
|
||||
AccuWeatherObservationDataUpdateCoordinator,
|
||||
)
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
PLATFORMS = [Platform.SENSOR, Platform.WEATHER]
|
||||
|
||||
|
||||
@dataclass
|
||||
class AccuWeatherData:
|
||||
"""Data for AccuWeather integration."""
|
||||
|
||||
coordinator_observation: AccuWeatherObservationDataUpdateCoordinator
|
||||
coordinator_daily_forecast: AccuWeatherDailyForecastDataUpdateCoordinator
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
"""Set up AccuWeather as config entry."""
|
||||
api_key: str = entry.data[CONF_API_KEY]
|
||||
name: str = entry.data[CONF_NAME]
|
||||
assert entry.unique_id is not None
|
||||
location_key = entry.unique_id
|
||||
forecast: bool = entry.options.get(CONF_FORECAST, False)
|
||||
|
||||
_LOGGER.debug("Using location_key: %s, get forecast: %s", location_key, forecast)
|
||||
location_key = entry.unique_id
|
||||
|
||||
_LOGGER.debug("Using location_key: %s", location_key)
|
||||
|
||||
websession = async_get_clientsession(hass)
|
||||
accuweather = AccuWeather(api_key, websession, location_key=location_key)
|
||||
|
||||
coordinator = AccuWeatherDataUpdateCoordinator(
|
||||
hass, websession, api_key, location_key, forecast, name
|
||||
coordinator_observation = AccuWeatherObservationDataUpdateCoordinator(
|
||||
hass,
|
||||
accuweather,
|
||||
name,
|
||||
"observation",
|
||||
UPDATE_INTERVAL_OBSERVATION,
|
||||
)
|
||||
await coordinator.async_config_entry_first_refresh()
|
||||
|
||||
coordinator_daily_forecast = AccuWeatherDailyForecastDataUpdateCoordinator(
|
||||
hass,
|
||||
accuweather,
|
||||
name,
|
||||
"daily forecast",
|
||||
UPDATE_INTERVAL_DAILY_FORECAST,
|
||||
)
|
||||
|
||||
await coordinator_observation.async_config_entry_first_refresh()
|
||||
await coordinator_daily_forecast.async_config_entry_first_refresh()
|
||||
|
||||
entry.async_on_unload(entry.add_update_listener(update_listener))
|
||||
|
||||
hass.data.setdefault(DOMAIN, {})[entry.entry_id] = coordinator
|
||||
hass.data.setdefault(DOMAIN, {})[entry.entry_id] = AccuWeatherData(
|
||||
coordinator_observation=coordinator_observation,
|
||||
coordinator_daily_forecast=coordinator_daily_forecast,
|
||||
)
|
||||
|
||||
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
|
||||
|
||||
# Remove ozone sensors from registry if they exist
|
||||
ent_reg = er.async_get(hass)
|
||||
for day in range(5):
|
||||
unique_id = f"{coordinator.location_key}-ozone-{day}"
|
||||
unique_id = f"{location_key}-ozone-{day}"
|
||||
if entity_id := ent_reg.async_get_entity_id(SENSOR_PLATFORM, DOMAIN, unique_id):
|
||||
_LOGGER.debug("Removing ozone sensor entity %s", entity_id)
|
||||
ent_reg.async_remove(entity_id)
|
||||
|
@ -74,65 +97,3 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
|||
async def update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None:
|
||||
"""Update listener."""
|
||||
await hass.config_entries.async_reload(entry.entry_id)
|
||||
|
||||
|
||||
class AccuWeatherDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]): # pylint: disable=hass-enforce-coordinator-module
|
||||
"""Class to manage fetching AccuWeather data API."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
hass: HomeAssistant,
|
||||
session: ClientSession,
|
||||
api_key: str,
|
||||
location_key: str,
|
||||
forecast: bool,
|
||||
name: str,
|
||||
) -> None:
|
||||
"""Initialize."""
|
||||
self.location_key = location_key
|
||||
self.forecast = forecast
|
||||
self.accuweather = AccuWeather(api_key, session, location_key=location_key)
|
||||
self.device_info = DeviceInfo(
|
||||
entry_type=DeviceEntryType.SERVICE,
|
||||
identifiers={(DOMAIN, location_key)},
|
||||
manufacturer=MANUFACTURER,
|
||||
name=name,
|
||||
# You don't need to provide specific details for the URL,
|
||||
# so passing in _ characters is fine if the location key
|
||||
# is correct
|
||||
configuration_url=(
|
||||
"http://accuweather.com/en/"
|
||||
f"_/_/{location_key}/"
|
||||
f"weather-forecast/{location_key}/"
|
||||
),
|
||||
)
|
||||
|
||||
# Enabling the forecast download increases the number of requests per data
|
||||
# update, we use 40 minutes for current condition only and 80 minutes for
|
||||
# current condition and forecast as update interval to not exceed allowed number
|
||||
# of requests. We have 50 requests allowed per day, so we use 36 and leave 14 as
|
||||
# a reserve for restarting HA.
|
||||
update_interval = timedelta(minutes=40)
|
||||
if self.forecast:
|
||||
update_interval *= 2
|
||||
_LOGGER.debug("Data will be update every %s", update_interval)
|
||||
|
||||
super().__init__(hass, _LOGGER, name=DOMAIN, update_interval=update_interval)
|
||||
|
||||
async def _async_update_data(self) -> dict[str, Any]:
|
||||
"""Update data via library."""
|
||||
forecast: list[dict[str, Any]] = []
|
||||
try:
|
||||
async with timeout(10):
|
||||
current = await self.accuweather.async_get_current_conditions()
|
||||
if self.forecast:
|
||||
forecast = await self.accuweather.async_get_daily_forecast()
|
||||
except (
|
||||
ApiError,
|
||||
ClientConnectorError,
|
||||
InvalidApiKeyError,
|
||||
RequestsExceededError,
|
||||
) as error:
|
||||
raise UpdateFailed(error) from error
|
||||
_LOGGER.debug("Requests remaining: %d", self.accuweather.requests_remaining)
|
||||
return {**current, ATTR_FORECAST: forecast}
|
||||
|
|
|
@ -10,26 +10,12 @@ from aiohttp import ClientError
|
|||
from aiohttp.client_exceptions import ClientConnectorError
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult
|
||||
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
|
||||
from homeassistant.const import CONF_API_KEY, CONF_LATITUDE, CONF_LONGITUDE, CONF_NAME
|
||||
from homeassistant.core import callback
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
from homeassistant.helpers.schema_config_entry_flow import (
|
||||
SchemaFlowFormStep,
|
||||
SchemaOptionsFlowHandler,
|
||||
)
|
||||
|
||||
from .const import CONF_FORECAST, DOMAIN
|
||||
|
||||
OPTIONS_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Optional(CONF_FORECAST, default=False): bool,
|
||||
}
|
||||
)
|
||||
OPTIONS_FLOW = {
|
||||
"init": SchemaFlowFormStep(OPTIONS_SCHEMA),
|
||||
}
|
||||
from .const import DOMAIN
|
||||
|
||||
|
||||
class AccuWeatherFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
|
@ -87,9 +73,3 @@ class AccuWeatherFlowHandler(ConfigFlow, domain=DOMAIN):
|
|||
),
|
||||
errors=errors,
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
@callback
|
||||
def async_get_options_flow(config_entry: ConfigEntry) -> SchemaOptionsFlowHandler:
|
||||
"""Options callback for AccuWeather."""
|
||||
return SchemaOptionsFlowHandler(config_entry, OPTIONS_FLOW)
|
||||
|
|
|
@@ -2,6 +2,7 @@

from __future__ import annotations

from datetime import timedelta
from typing import Final

from homeassistant.components.weather import (
@@ -27,10 +28,8 @@ ATTR_CATEGORY: Final = "Category"
ATTR_DIRECTION: Final = "Direction"
ATTR_ENGLISH: Final = "English"
ATTR_LEVEL: Final = "level"
ATTR_FORECAST: Final = "forecast"
ATTR_SPEED: Final = "Speed"
ATTR_VALUE: Final = "Value"
CONF_FORECAST: Final = "forecast"
DOMAIN: Final = "accuweather"
MANUFACTURER: Final = "AccuWeather, Inc."
MAX_FORECAST_DAYS: Final = 4
@@ -56,3 +55,5 @@ CONDITION_MAP = {
    for cond_ha, cond_codes in CONDITION_CLASSES.items()
    for cond_code in cond_codes
}
UPDATE_INTERVAL_OBSERVATION = timedelta(minutes=40)
UPDATE_INTERVAL_DAILY_FORECAST = timedelta(hours=6)
@@ -0,0 +1,124 @@
"""The AccuWeather coordinator."""

from asyncio import timeout
from datetime import timedelta
import logging
from typing import TYPE_CHECKING, Any

from accuweather import AccuWeather, ApiError, InvalidApiKeyError, RequestsExceededError
from aiohttp.client_exceptions import ClientConnectorError

from homeassistant.core import HomeAssistant
from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo
from homeassistant.helpers.update_coordinator import (
    DataUpdateCoordinator,
    TimestampDataUpdateCoordinator,
    UpdateFailed,
)

from .const import DOMAIN, MANUFACTURER

EXCEPTIONS = (ApiError, ClientConnectorError, InvalidApiKeyError, RequestsExceededError)

_LOGGER = logging.getLogger(__name__)


class AccuWeatherObservationDataUpdateCoordinator(
    DataUpdateCoordinator[dict[str, Any]]
):
    """Class to manage fetching AccuWeather data API."""

    def __init__(
        self,
        hass: HomeAssistant,
        accuweather: AccuWeather,
        name: str,
        coordinator_type: str,
        update_interval: timedelta,
    ) -> None:
        """Initialize."""
        self.accuweather = accuweather
        self.location_key = accuweather.location_key

        if TYPE_CHECKING:
            assert self.location_key is not None

        self.device_info = _get_device_info(self.location_key, name)

        super().__init__(
            hass,
            _LOGGER,
            name=f"{name} ({coordinator_type})",
            update_interval=update_interval,
        )

    async def _async_update_data(self) -> dict[str, Any]:
        """Update data via library."""
        try:
            async with timeout(10):
                result = await self.accuweather.async_get_current_conditions()
        except EXCEPTIONS as error:
            raise UpdateFailed(error) from error

        _LOGGER.debug("Requests remaining: %d", self.accuweather.requests_remaining)

        return result


class AccuWeatherDailyForecastDataUpdateCoordinator(
    TimestampDataUpdateCoordinator[list[dict[str, Any]]]
):
    """Class to manage fetching AccuWeather data API."""

    def __init__(
        self,
        hass: HomeAssistant,
        accuweather: AccuWeather,
        name: str,
        coordinator_type: str,
        update_interval: timedelta,
    ) -> None:
        """Initialize."""
        self.accuweather = accuweather
        self.location_key = accuweather.location_key

        if TYPE_CHECKING:
            assert self.location_key is not None

        self.device_info = _get_device_info(self.location_key, name)

        super().__init__(
            hass,
            _LOGGER,
            name=f"{name} ({coordinator_type})",
            update_interval=update_interval,
        )

    async def _async_update_data(self) -> list[dict[str, Any]]:
        """Update data via library."""
        try:
            async with timeout(10):
                result = await self.accuweather.async_get_daily_forecast()
        except EXCEPTIONS as error:
            raise UpdateFailed(error) from error

        _LOGGER.debug("Requests remaining: %d", self.accuweather.requests_remaining)

        return result


def _get_device_info(location_key: str, name: str) -> DeviceInfo:
    """Get device info."""
    return DeviceInfo(
        entry_type=DeviceEntryType.SERVICE,
        identifiers={(DOMAIN, location_key)},
        manufacturer=MANUFACTURER,
        name=name,
        # You don't need to provide specific details for the URL,
        # so passing in _ characters is fine if the location key
        # is correct
        configuration_url=(
            "http://accuweather.com/en/"
            f"_/_/{location_key}/weather-forecast/{location_key}/"
        ),
    )
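Both coordinators above funnel library errors into UpdateFailed inside a 10-second asyncio timeout block, so consumers only ever see one failure type. A dependency-free sketch of that error-mapping pattern; the UpdateFailed here is a stand-in for the helper of the same name, and unlike the code above it also maps timeouts:

import asyncio
from collections.abc import Awaitable, Callable
from typing import Any


class UpdateFailed(Exception):
    """Raised when a data update fails (illustrative stand-in)."""


async def fetch_with_timeout(fetch: Callable[[], Awaitable[Any]], timeout_s: float = 10) -> Any:
    """Run fetch() under a timeout and map failures to a single exception type."""
    try:
        async with asyncio.timeout(timeout_s):
            return await fetch()
    except (TimeoutError, OSError) as error:
        raise UpdateFailed(error) from error


async def main() -> None:
    async def slow() -> str:
        await asyncio.sleep(0.01)
        return "ok"

    print(await fetch_with_timeout(slow))


asyncio.run(main())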
@@ -9,7 +9,7 @@ from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_API_KEY, CONF_LATITUDE, CONF_LONGITUDE
from homeassistant.core import HomeAssistant

from . import AccuWeatherDataUpdateCoordinator
from . import AccuWeatherData
from .const import DOMAIN

TO_REDACT = {CONF_API_KEY, CONF_LATITUDE, CONF_LONGITUDE}
@@ -19,13 +19,9 @@ async def async_get_config_entry_diagnostics(
    hass: HomeAssistant, config_entry: ConfigEntry
) -> dict[str, Any]:
    """Return diagnostics for a config entry."""
    coordinator: AccuWeatherDataUpdateCoordinator = hass.data[DOMAIN][
        config_entry.entry_id
    ]
    accuweather_data: AccuWeatherData = hass.data[DOMAIN][config_entry.entry_id]

    diagnostics_data = {
    return {
        "config_entry_data": async_redact_data(dict(config_entry.data), TO_REDACT),
        "coordinator_data": coordinator.data,
        "observation_data": accuweather_data.coordinator_observation.data,
    }

    return diagnostics_data
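The diagnostics handler now returns the dictionary directly and exposes only the observation coordinator's data, still passing the config entry data through async_redact_data so the API key and coordinates never land in a diagnostics dump. A tiny generic sketch of the redaction idea, with a hypothetical helper rather than the Home Assistant one:

from typing import Any

TO_REDACT = {"api_key", "latitude", "longitude"}


def redact(data: dict[str, Any], to_redact: set[str]) -> dict[str, Any]:
    """Return a copy with sensitive keys replaced, leaving everything else untouched."""
    return {k: "**REDACTED**" if k in to_redact else v for k, v in data.items()}


print(redact({"api_key": "secret", "name": "Home"}, TO_REDACT))
# {'api_key': '**REDACTED**', 'name': 'Home'}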
@@ -8,6 +8,6 @@
  "iot_class": "cloud_polling",
  "loggers": ["accuweather"],
  "quality_scale": "platinum",
  "requirements": ["accuweather==2.1.1"],
  "requirements": ["accuweather==3.0.0"],
  "single_config_entry": true
}
@@ -28,13 +28,12 @@ from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.update_coordinator import CoordinatorEntity

from . import AccuWeatherDataUpdateCoordinator
from . import AccuWeatherData
from .const import (
    API_METRIC,
    ATTR_CATEGORY,
    ATTR_DIRECTION,
    ATTR_ENGLISH,
    ATTR_FORECAST,
    ATTR_LEVEL,
    ATTR_SPEED,
    ATTR_VALUE,
@@ -42,6 +41,10 @@ from .const import (
    DOMAIN,
    MAX_FORECAST_DAYS,
)
from .coordinator import (
    AccuWeatherDailyForecastDataUpdateCoordinator,
    AccuWeatherObservationDataUpdateCoordinator,
)

PARALLEL_UPDATES = 1

@@ -52,12 +55,18 @@ class AccuWeatherSensorDescription(SensorEntityDescription):

    value_fn: Callable[[dict[str, Any]], str | int | float | None]
    attr_fn: Callable[[dict[str, Any]], dict[str, Any]] = lambda _: {}
    day: int | None = None


FORECAST_SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = (
@dataclass(frozen=True, kw_only=True)
class AccuWeatherForecastSensorDescription(AccuWeatherSensorDescription):
    """Class describing AccuWeather sensor entities."""

    day: int


FORECAST_SENSOR_TYPES: tuple[AccuWeatherForecastSensorDescription, ...] = (
    *(
        AccuWeatherSensorDescription(
        AccuWeatherForecastSensorDescription(
            key="AirQuality",
            icon="mdi:air-filter",
            value_fn=lambda data: cast(str, data[ATTR_CATEGORY]),
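Moving day from an optional field on the base description to a required field on a dedicated forecast subclass works because the descriptions are keyword-only dataclasses, so a required field may follow inherited fields that have defaults. A minimal sketch of the idea, with illustrative names:

from dataclasses import dataclass


@dataclass(frozen=True, kw_only=True)
class SensorDescription:
    key: str
    icon: str | None = None


@dataclass(frozen=True, kw_only=True)
class ForecastSensorDescription(SensorDescription):
    # A required field after inherited defaults is only legal because
    # kw_only=True removes the positional-ordering constraint.
    day: int


desc = ForecastSensorDescription(key="AirQuality", day=0)
# desc.day = 1  # would raise dataclasses.FrozenInstanceError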
@ -69,7 +78,7 @@ FORECAST_SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = (
|
|||
for day in range(MAX_FORECAST_DAYS + 1)
|
||||
),
|
||||
*(
|
||||
AccuWeatherSensorDescription(
|
||||
AccuWeatherForecastSensorDescription(
|
||||
key="CloudCoverDay",
|
||||
icon="mdi:weather-cloudy",
|
||||
entity_registry_enabled_default=False,
|
||||
|
@ -81,7 +90,7 @@ FORECAST_SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = (
|
|||
for day in range(MAX_FORECAST_DAYS + 1)
|
||||
),
|
||||
*(
|
||||
AccuWeatherSensorDescription(
|
||||
AccuWeatherForecastSensorDescription(
|
||||
key="CloudCoverNight",
|
||||
icon="mdi:weather-cloudy",
|
||||
entity_registry_enabled_default=False,
|
||||
|
@ -93,7 +102,7 @@ FORECAST_SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = (
|
|||
for day in range(MAX_FORECAST_DAYS + 1)
|
||||
),
|
||||
*(
|
||||
AccuWeatherSensorDescription(
|
||||
AccuWeatherForecastSensorDescription(
|
||||
key="Grass",
|
||||
icon="mdi:grass",
|
||||
entity_registry_enabled_default=False,
|
||||
|
@ -106,7 +115,7 @@ FORECAST_SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = (
|
|||
for day in range(MAX_FORECAST_DAYS + 1)
|
||||
),
|
||||
*(
|
||||
AccuWeatherSensorDescription(
|
||||
AccuWeatherForecastSensorDescription(
|
||||
key="HoursOfSun",
|
||||
icon="mdi:weather-partly-cloudy",
|
||||
native_unit_of_measurement=UnitOfTime.HOURS,
|
||||
|
@ -117,7 +126,7 @@ FORECAST_SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = (
|
|||
for day in range(MAX_FORECAST_DAYS + 1)
|
||||
),
|
||||
*(
|
||||
AccuWeatherSensorDescription(
|
||||
AccuWeatherForecastSensorDescription(
|
||||
key="LongPhraseDay",
|
||||
value_fn=lambda data: cast(str, data),
|
||||
translation_key=f"condition_day_{day}d",
|
||||
|
@ -126,7 +135,7 @@ FORECAST_SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = (
|
|||
for day in range(MAX_FORECAST_DAYS + 1)
|
||||
),
|
||||
*(
|
||||
AccuWeatherSensorDescription(
|
||||
AccuWeatherForecastSensorDescription(
|
||||
key="LongPhraseNight",
|
||||
value_fn=lambda data: cast(str, data),
|
||||
translation_key=f"condition_night_{day}d",
|
||||
|
@ -135,7 +144,7 @@ FORECAST_SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = (
|
|||
for day in range(MAX_FORECAST_DAYS + 1)
|
||||
),
|
||||
*(
|
||||
AccuWeatherSensorDescription(
|
||||
AccuWeatherForecastSensorDescription(
key="Mold",
icon="mdi:blur",
entity_registry_enabled_default=False,
@@ -148,7 +157,7 @@ FORECAST_SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = (
for day in range(MAX_FORECAST_DAYS + 1)
),
*(
AccuWeatherSensorDescription(
AccuWeatherForecastSensorDescription(
key="Ragweed",
icon="mdi:sprout",
native_unit_of_measurement=CONCENTRATION_PARTS_PER_CUBIC_METER,
@@ -161,7 +170,7 @@ FORECAST_SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = (
for day in range(MAX_FORECAST_DAYS + 1)
),
*(
AccuWeatherSensorDescription(
AccuWeatherForecastSensorDescription(
key="RealFeelTemperatureMax",
device_class=SensorDeviceClass.TEMPERATURE,
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
@@ -172,7 +181,7 @@ FORECAST_SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = (
for day in range(MAX_FORECAST_DAYS + 1)
),
*(
AccuWeatherSensorDescription(
AccuWeatherForecastSensorDescription(
key="RealFeelTemperatureMin",
device_class=SensorDeviceClass.TEMPERATURE,
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
@@ -183,7 +192,7 @@ FORECAST_SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = (
for day in range(MAX_FORECAST_DAYS + 1)
),
*(
AccuWeatherSensorDescription(
AccuWeatherForecastSensorDescription(
key="RealFeelTemperatureShadeMax",
device_class=SensorDeviceClass.TEMPERATURE,
entity_registry_enabled_default=False,
@@ -195,7 +204,7 @@ FORECAST_SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = (
for day in range(MAX_FORECAST_DAYS + 1)
),
*(
AccuWeatherSensorDescription(
AccuWeatherForecastSensorDescription(
key="RealFeelTemperatureShadeMin",
device_class=SensorDeviceClass.TEMPERATURE,
entity_registry_enabled_default=False,
@@ -207,7 +216,7 @@ FORECAST_SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = (
for day in range(MAX_FORECAST_DAYS + 1)
),
*(
AccuWeatherSensorDescription(
AccuWeatherForecastSensorDescription(
key="SolarIrradianceDay",
icon="mdi:weather-sunny",
entity_registry_enabled_default=False,
@@ -219,7 +228,7 @@ FORECAST_SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = (
for day in range(MAX_FORECAST_DAYS + 1)
),
*(
AccuWeatherSensorDescription(
AccuWeatherForecastSensorDescription(
key="SolarIrradianceNight",
icon="mdi:weather-sunny",
entity_registry_enabled_default=False,
@@ -231,7 +240,7 @@ FORECAST_SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = (
for day in range(MAX_FORECAST_DAYS + 1)
),
*(
AccuWeatherSensorDescription(
AccuWeatherForecastSensorDescription(
key="ThunderstormProbabilityDay",
icon="mdi:weather-lightning",
native_unit_of_measurement=PERCENTAGE,
@@ -242,7 +251,7 @@ FORECAST_SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = (
for day in range(MAX_FORECAST_DAYS + 1)
),
*(
AccuWeatherSensorDescription(
AccuWeatherForecastSensorDescription(
key="ThunderstormProbabilityNight",
icon="mdi:weather-lightning",
native_unit_of_measurement=PERCENTAGE,
@@ -253,7 +262,7 @@ FORECAST_SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = (
for day in range(MAX_FORECAST_DAYS + 1)
),
*(
AccuWeatherSensorDescription(
AccuWeatherForecastSensorDescription(
key="Tree",
icon="mdi:tree-outline",
native_unit_of_measurement=CONCENTRATION_PARTS_PER_CUBIC_METER,
@@ -266,7 +275,7 @@ FORECAST_SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = (
for day in range(MAX_FORECAST_DAYS + 1)
),
*(
AccuWeatherSensorDescription(
AccuWeatherForecastSensorDescription(
key="UVIndex",
icon="mdi:weather-sunny",
native_unit_of_measurement=UV_INDEX,
@@ -278,7 +287,7 @@ FORECAST_SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = (
for day in range(MAX_FORECAST_DAYS + 1)
),
*(
AccuWeatherSensorDescription(
AccuWeatherForecastSensorDescription(
key="WindGustDay",
device_class=SensorDeviceClass.WIND_SPEED,
entity_registry_enabled_default=False,
@@ -291,7 +300,7 @@ FORECAST_SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = (
for day in range(MAX_FORECAST_DAYS + 1)
),
*(
AccuWeatherSensorDescription(
AccuWeatherForecastSensorDescription(
key="WindGustNight",
device_class=SensorDeviceClass.WIND_SPEED,
entity_registry_enabled_default=False,
@@ -304,7 +313,7 @@ FORECAST_SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = (
for day in range(MAX_FORECAST_DAYS + 1)
),
*(
AccuWeatherSensorDescription(
AccuWeatherForecastSensorDescription(
key="WindDay",
device_class=SensorDeviceClass.WIND_SPEED,
native_unit_of_measurement=UnitOfSpeed.KILOMETERS_PER_HOUR,
@@ -316,7 +325,7 @@ FORECAST_SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = (
for day in range(MAX_FORECAST_DAYS + 1)
),
*(
AccuWeatherSensorDescription(
AccuWeatherForecastSensorDescription(
key="WindNight",
device_class=SensorDeviceClass.WIND_SPEED,
native_unit_of_measurement=UnitOfSpeed.KILOMETERS_PER_HOUR,
@@ -453,25 +462,33 @@ async def async_setup_entry(
) -> None:
"""Add AccuWeather entities from a config_entry."""

coordinator: AccuWeatherDataUpdateCoordinator = hass.data[DOMAIN][entry.entry_id]
accuweather_data: AccuWeatherData = hass.data[DOMAIN][entry.entry_id]

sensors = [
AccuWeatherSensor(coordinator, description) for description in SENSOR_TYPES
observation_coordinator: AccuWeatherObservationDataUpdateCoordinator = (
accuweather_data.coordinator_observation
)
forecast_daily_coordinator: AccuWeatherDailyForecastDataUpdateCoordinator = (
accuweather_data.coordinator_daily_forecast
)

sensors: list[AccuWeatherSensor | AccuWeatherForecastSensor] = [
AccuWeatherSensor(observation_coordinator, description)
for description in SENSOR_TYPES
]

if coordinator.forecast:
for description in FORECAST_SENSOR_TYPES:
# Some air quality/allergy sensors are only available for certain
# locations.
if description.key not in coordinator.data[ATTR_FORECAST][description.day]:
continue
sensors.append(AccuWeatherSensor(coordinator, description))
sensors.extend(
[
AccuWeatherForecastSensor(forecast_daily_coordinator, description)
for description in FORECAST_SENSOR_TYPES
if description.key in forecast_daily_coordinator.data[description.day]
]
)

async_add_entities(sensors)


class AccuWeatherSensor(
CoordinatorEntity[AccuWeatherDataUpdateCoordinator], SensorEntity
CoordinatorEntity[AccuWeatherObservationDataUpdateCoordinator], SensorEntity
):
"""Define an AccuWeather entity."""

@@ -481,22 +498,15 @@ class AccuWeatherSensor(

def __init__(
self,
coordinator: AccuWeatherDataUpdateCoordinator,
coordinator: AccuWeatherObservationDataUpdateCoordinator,
description: AccuWeatherSensorDescription,
) -> None:
"""Initialize."""
super().__init__(coordinator)
self.forecast_day = description.day

self.entity_description = description
self._sensor_data = _get_sensor_data(
coordinator.data, description.key, self.forecast_day
)
if self.forecast_day is not None:
self._attr_unique_id = f"{coordinator.location_key}-{description.key}-{self.forecast_day}".lower()
else:
self._attr_unique_id = (
f"{coordinator.location_key}-{description.key}".lower()
)
self._sensor_data = self._get_sensor_data(coordinator.data, description.key)
self._attr_unique_id = f"{coordinator.location_key}-{description.key}".lower()
self._attr_device_info = coordinator.device_info

@property

@@ -507,30 +517,78 @@ class AccuWeatherSensor(
@property
def extra_state_attributes(self) -> dict[str, Any]:
"""Return the state attributes."""
if self.forecast_day is not None:
return self.entity_description.attr_fn(self._sensor_data)

return self.entity_description.attr_fn(self.coordinator.data)

@callback
def _handle_coordinator_update(self) -> None:
"""Handle data update."""
self._sensor_data = _get_sensor_data(
self._sensor_data = self._get_sensor_data(
self.coordinator.data, self.entity_description.key
)
self.async_write_ha_state()

@staticmethod
def _get_sensor_data(
sensors: dict[str, Any],
kind: str,
) -> Any:
"""Get sensor data."""
if kind == "Precipitation":
return sensors["PrecipitationSummary"]["PastHour"]

return sensors[kind]


class AccuWeatherForecastSensor(
CoordinatorEntity[AccuWeatherDailyForecastDataUpdateCoordinator], SensorEntity
):
"""Define an AccuWeather entity."""

_attr_attribution = ATTRIBUTION
_attr_has_entity_name = True
entity_description: AccuWeatherForecastSensorDescription

def __init__(
self,
coordinator: AccuWeatherDailyForecastDataUpdateCoordinator,
description: AccuWeatherForecastSensorDescription,
) -> None:
"""Initialize."""
super().__init__(coordinator)

self.forecast_day = description.day
self.entity_description = description
self._sensor_data = self._get_sensor_data(
coordinator.data, description.key, self.forecast_day
)
self._attr_unique_id = (
f"{coordinator.location_key}-{description.key}-{self.forecast_day}".lower()
)
self._attr_device_info = coordinator.device_info

@property
def native_value(self) -> str | int | float | None:
"""Return the state."""
return self.entity_description.value_fn(self._sensor_data)

@property
def extra_state_attributes(self) -> dict[str, Any]:
"""Return the state attributes."""
return self.entity_description.attr_fn(self._sensor_data)

@callback
def _handle_coordinator_update(self) -> None:
"""Handle data update."""
self._sensor_data = self._get_sensor_data(
self.coordinator.data, self.entity_description.key, self.forecast_day
)
self.async_write_ha_state()


def _get_sensor_data(
sensors: dict[str, Any],
kind: str,
forecast_day: int | None = None,
) -> Any:
"""Get sensor data."""
if forecast_day is not None:
return sensors[ATTR_FORECAST][forecast_day][kind]

if kind == "Precipitation":
return sensors["PrecipitationSummary"]["PastHour"]

return sensors[kind]
@staticmethod
def _get_sensor_data(
sensors: list[dict[str, Any]],
kind: str,
forecast_day: int,
) -> Any:
"""Get sensor data."""
return sensors[forecast_day][kind]
@@ -11,7 +11,7 @@
}
},
"create_entry": {
"default": "Some sensors are not enabled by default. You can enable them in the entity registry after the integration configuration.\nWeather forecast is not enabled by default. You can enable it in the integration options."
"default": "Some sensors are not enabled by default. You can enable them in the entity registry after the integration configuration."
},
"error": {
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
@@ -790,16 +790,6 @@
}
}
},
"options": {
"step": {
"init": {
"description": "Due to the limitations of the free version of the AccuWeather API key, when you enable weather forecast, data updates will be performed every 80 minutes instead of every 40 minutes.",
"data": {
"forecast": "Weather forecast"
}
}
}
},
"system_health": {
"info": {
"can_reach_server": "Reach AccuWeather server",
@@ -24,7 +24,7 @@ async def system_health_info(hass: HomeAssistant) -> dict[str, Any]:
"""Get info for the info page."""
remaining_requests = list(hass.data[DOMAIN].values())[
0
].accuweather.requests_remaining
].coordinator_observation.accuweather.requests_remaining

return {
"can_reach_server": system_health.async_check_can_reach_url(hass, ENDPOINT),
@@ -17,8 +17,8 @@ from homeassistant.components.weather import (
ATTR_FORECAST_TIME,
ATTR_FORECAST_UV_INDEX,
ATTR_FORECAST_WIND_BEARING,
CoordinatorWeatherEntity,
Forecast,
SingleCoordinatorWeatherEntity,
WeatherEntityFeature,
)
from homeassistant.config_entries import ConfigEntry
@@ -31,19 +31,23 @@ from homeassistant.const import (
)
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.update_coordinator import TimestampDataUpdateCoordinator
from homeassistant.util.dt import utc_from_timestamp

from . import AccuWeatherDataUpdateCoordinator
from . import AccuWeatherData
from .const import (
API_METRIC,
ATTR_DIRECTION,
ATTR_FORECAST,
ATTR_SPEED,
ATTR_VALUE,
ATTRIBUTION,
CONDITION_MAP,
DOMAIN,
)
from .coordinator import (
AccuWeatherDailyForecastDataUpdateCoordinator,
AccuWeatherObservationDataUpdateCoordinator,
)

PARALLEL_UPDATES = 1

@@ -52,106 +56,134 @@ async def async_setup_entry(
hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback
) -> None:
"""Add an AccuWeather weather entity from a config_entry."""
accuweather_data: AccuWeatherData = hass.data[DOMAIN][entry.entry_id]

coordinator: AccuWeatherDataUpdateCoordinator = hass.data[DOMAIN][entry.entry_id]

async_add_entities([AccuWeatherEntity(coordinator)])
async_add_entities([AccuWeatherEntity(accuweather_data)])


class AccuWeatherEntity(
SingleCoordinatorWeatherEntity[AccuWeatherDataUpdateCoordinator]
CoordinatorWeatherEntity[
AccuWeatherObservationDataUpdateCoordinator,
AccuWeatherDailyForecastDataUpdateCoordinator,
TimestampDataUpdateCoordinator,
TimestampDataUpdateCoordinator,
]
):
"""Define an AccuWeather entity."""

_attr_has_entity_name = True
_attr_name = None

def __init__(self, coordinator: AccuWeatherDataUpdateCoordinator) -> None:
def __init__(self, accuweather_data: AccuWeatherData) -> None:
"""Initialize."""
super().__init__(coordinator)
super().__init__(
observation_coordinator=accuweather_data.coordinator_observation,
daily_coordinator=accuweather_data.coordinator_daily_forecast,
)

self._attr_native_precipitation_unit = UnitOfPrecipitationDepth.MILLIMETERS
self._attr_native_pressure_unit = UnitOfPressure.HPA
self._attr_native_temperature_unit = UnitOfTemperature.CELSIUS
self._attr_native_visibility_unit = UnitOfLength.KILOMETERS
self._attr_native_wind_speed_unit = UnitOfSpeed.KILOMETERS_PER_HOUR
self._attr_unique_id = coordinator.location_key
self._attr_unique_id = accuweather_data.coordinator_observation.location_key
self._attr_attribution = ATTRIBUTION
self._attr_device_info = coordinator.device_info
if self.coordinator.forecast:
self._attr_supported_features = WeatherEntityFeature.FORECAST_DAILY
self._attr_device_info = accuweather_data.coordinator_observation.device_info
self._attr_supported_features = WeatherEntityFeature.FORECAST_DAILY

self.observation_coordinator = accuweather_data.coordinator_observation
self.daily_coordinator = accuweather_data.coordinator_daily_forecast

@property
def condition(self) -> str | None:
"""Return the current condition."""
return CONDITION_MAP.get(self.coordinator.data["WeatherIcon"])
return CONDITION_MAP.get(self.observation_coordinator.data["WeatherIcon"])

@property
def cloud_coverage(self) -> float:
"""Return the Cloud coverage in %."""
return cast(float, self.coordinator.data["CloudCover"])
return cast(float, self.observation_coordinator.data["CloudCover"])

@property
def native_apparent_temperature(self) -> float:
"""Return the apparent temperature."""
return cast(
float, self.coordinator.data["ApparentTemperature"][API_METRIC][ATTR_VALUE]
float,
self.observation_coordinator.data["ApparentTemperature"][API_METRIC][
ATTR_VALUE
],
)

@property
def native_temperature(self) -> float:
"""Return the temperature."""
return cast(float, self.coordinator.data["Temperature"][API_METRIC][ATTR_VALUE])
return cast(
float,
self.observation_coordinator.data["Temperature"][API_METRIC][ATTR_VALUE],
)

@property
def native_pressure(self) -> float:
"""Return the pressure."""
return cast(float, self.coordinator.data["Pressure"][API_METRIC][ATTR_VALUE])
return cast(
float, self.observation_coordinator.data["Pressure"][API_METRIC][ATTR_VALUE]
)

@property
def native_dew_point(self) -> float:
"""Return the dew point."""
return cast(float, self.coordinator.data["DewPoint"][API_METRIC][ATTR_VALUE])
return cast(
float, self.observation_coordinator.data["DewPoint"][API_METRIC][ATTR_VALUE]
)

@property
def humidity(self) -> int:
"""Return the humidity."""
return cast(int, self.coordinator.data["RelativeHumidity"])
return cast(int, self.observation_coordinator.data["RelativeHumidity"])

@property
def native_wind_gust_speed(self) -> float:
"""Return the wind gust speed."""
return cast(
float, self.coordinator.data["WindGust"][ATTR_SPEED][API_METRIC][ATTR_VALUE]
float,
self.observation_coordinator.data["WindGust"][ATTR_SPEED][API_METRIC][
ATTR_VALUE
],
)

@property
def native_wind_speed(self) -> float:
"""Return the wind speed."""
return cast(
float, self.coordinator.data["Wind"][ATTR_SPEED][API_METRIC][ATTR_VALUE]
float,
self.observation_coordinator.data["Wind"][ATTR_SPEED][API_METRIC][
ATTR_VALUE
],
)

@property
def wind_bearing(self) -> int:
"""Return the wind bearing."""
return cast(int, self.coordinator.data["Wind"][ATTR_DIRECTION]["Degrees"])
return cast(
int, self.observation_coordinator.data["Wind"][ATTR_DIRECTION]["Degrees"]
)

@property
def native_visibility(self) -> float:
"""Return the visibility."""
return cast(float, self.coordinator.data["Visibility"][API_METRIC][ATTR_VALUE])
return cast(
float,
self.observation_coordinator.data["Visibility"][API_METRIC][ATTR_VALUE],
)

@property
def uv_index(self) -> float:
"""Return the UV index."""
return cast(float, self.coordinator.data["UVIndex"])
return cast(float, self.observation_coordinator.data["UVIndex"])

@callback
def _async_forecast_daily(self) -> list[Forecast] | None:
"""Return the daily forecast in native units."""
if not self.coordinator.forecast:
return None
# remap keys from library to keys understood by the weather component
return [
{
ATTR_FORECAST_TIME: utc_from_timestamp(item["EpochDate"]).isoformat(),

@@ -175,5 +207,5 @@ class AccuWeatherEntity(
ATTR_FORECAST_WIND_BEARING: item["WindDay"][ATTR_DIRECTION]["Degrees"],
ATTR_FORECAST_CONDITION: CONDITION_MAP.get(item["IconDay"]),
}
for item in self.coordinator.data[ATTR_FORECAST]
for item in self.daily_coordinator.data
]
@@ -135,11 +135,15 @@ class AdaxDevice(ClimateEntity):
class LocalAdaxDevice(ClimateEntity):
"""Representation of a heater."""

_attr_hvac_modes = [HVACMode.HEAT]
_attr_hvac_modes = [HVACMode.HEAT, HVACMode.OFF]
_attr_hvac_mode = HVACMode.HEAT
_attr_max_temp = 35
_attr_min_temp = 5
_attr_supported_features = ClimateEntityFeature.TARGET_TEMPERATURE
_attr_supported_features = (
ClimateEntityFeature.TARGET_TEMPERATURE
| ClimateEntityFeature.TURN_OFF
| ClimateEntityFeature.TURN_ON
)
_attr_target_temperature_step = PRECISION_WHOLE
_attr_temperature_unit = UnitOfTemperature.CELSIUS

@@ -152,6 +156,14 @@ class LocalAdaxDevice(ClimateEntity):
manufacturer="Adax",
)

async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None:
"""Set hvac mode."""
if hvac_mode == HVACMode.HEAT:
temperature = self._attr_target_temperature or self._attr_min_temp
await self._adax_data_handler.set_target_temperature(temperature)
elif hvac_mode == HVACMode.OFF:
await self._adax_data_handler.set_target_temperature(0)

async def async_set_temperature(self, **kwargs: Any) -> None:
"""Set new target temperature."""
if (temperature := kwargs.get(ATTR_TEMPERATURE)) is None:

@@ -161,6 +173,14 @@ class LocalAdaxDevice(ClimateEntity):
async def async_update(self) -> None:
"""Get the latest data."""
data = await self._adax_data_handler.get_status()
self._attr_target_temperature = data["target_temperature"]
self._attr_current_temperature = data["current_temperature"]
self._attr_available = self._attr_current_temperature is not None
if (target_temp := data["target_temperature"]) == 0:
self._attr_hvac_mode = HVACMode.OFF
self._attr_icon = "mdi:radiator-off"
if target_temp == 0:
self._attr_target_temperature = self._attr_min_temp
else:
self._attr_hvac_mode = HVACMode.HEAT
self._attr_icon = "mdi:radiator"
self._attr_target_temperature = target_temp
@@ -2,6 +2,8 @@

from __future__ import annotations

from dataclasses import dataclass

from adguardhome import AdGuardHome, AdGuardHomeConnectionError
import voluptuous as vol

@@ -24,7 +26,6 @@ from homeassistant.helpers.aiohttp_client import async_get_clientsession

from .const import (
CONF_FORCE,
DATA_ADGUARD_CLIENT,
DOMAIN,
SERVICE_ADD_URL,
SERVICE_DISABLE_URL,

@@ -44,6 +45,14 @@ SERVICE_REFRESH_SCHEMA = vol.Schema(
PLATFORMS = [Platform.SENSOR, Platform.SWITCH]


@dataclass
class AdGuardData:
"""Adguard data type."""

client: AdGuardHome
version: str


async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Set up AdGuard Home from a config entry."""
session = async_get_clientsession(hass, entry.data[CONF_VERIFY_SSL])

@@ -57,13 +66,13 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
session=session,
)

hass.data.setdefault(DOMAIN, {})[entry.entry_id] = {DATA_ADGUARD_CLIENT: adguard}

try:
await adguard.version()
version = await adguard.version()
except AdGuardHomeConnectionError as exception:
raise ConfigEntryNotReady from exception

hass.data.setdefault(DOMAIN, {})[entry.entry_id] = AdGuardData(adguard, version)

await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)

async def add_url(call: ServiceCall) -> None:
@@ -6,9 +6,6 @@ DOMAIN = "adguard"

LOGGER = logging.getLogger(__package__)

DATA_ADGUARD_CLIENT = "adguard_client"
DATA_ADGUARD_VERSION = "adguard_version"

CONF_FORCE = "force"

SERVICE_ADD_URL = "add_url"
@@ -2,13 +2,14 @@

from __future__ import annotations

from adguardhome import AdGuardHome, AdGuardHomeError
from adguardhome import AdGuardHomeError

from homeassistant.config_entries import SOURCE_HASSIO, ConfigEntry
from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo
from homeassistant.helpers.entity import Entity

from .const import DATA_ADGUARD_VERSION, DOMAIN, LOGGER
from . import AdGuardData
from .const import DOMAIN, LOGGER


class AdGuardHomeEntity(Entity):

@@ -19,12 +20,13 @@ class AdGuardHomeEntity(Entity):

def __init__(
self,
adguard: AdGuardHome,
data: AdGuardData,
entry: ConfigEntry,
) -> None:
"""Initialize the AdGuard Home entity."""
self._entry = entry
self.adguard = adguard
self.data = data
self.adguard = data.client

async def async_update(self) -> None:
"""Update AdGuard Home entity."""

@@ -68,8 +70,6 @@ class AdGuardHomeEntity(Entity):
},
manufacturer="AdGuard Team",
name="AdGuard Home",
sw_version=self.hass.data[DOMAIN][self._entry.entry_id].get(
DATA_ADGUARD_VERSION
),
sw_version=self.data.version,
configuration_url=config_url,
)
@@ -7,16 +7,16 @@ from dataclasses import dataclass
from datetime import timedelta
from typing import Any

from adguardhome import AdGuardHome, AdGuardHomeConnectionError
from adguardhome import AdGuardHome

from homeassistant.components.sensor import SensorEntity, SensorEntityDescription
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import PERCENTAGE, UnitOfTime
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import PlatformNotReady
from homeassistant.helpers.entity_platform import AddEntitiesCallback

from .const import DATA_ADGUARD_CLIENT, DATA_ADGUARD_VERSION, DOMAIN
from . import AdGuardData
from .const import DOMAIN
from .entity import AdGuardHomeEntity

SCAN_INTERVAL = timedelta(seconds=300)

@@ -89,17 +89,10 @@ async def async_setup_entry(
async_add_entities: AddEntitiesCallback,
) -> None:
"""Set up AdGuard Home sensor based on a config entry."""
adguard = hass.data[DOMAIN][entry.entry_id][DATA_ADGUARD_CLIENT]

try:
version = await adguard.version()
except AdGuardHomeConnectionError as exception:
raise PlatformNotReady from exception

hass.data[DOMAIN][entry.entry_id][DATA_ADGUARD_VERSION] = version
data: AdGuardData = hass.data[DOMAIN][entry.entry_id]

async_add_entities(
[AdGuardHomeSensor(adguard, entry, description) for description in SENSORS],
[AdGuardHomeSensor(data, entry, description) for description in SENSORS],
True,
)

@@ -111,18 +104,18 @@ class AdGuardHomeSensor(AdGuardHomeEntity, SensorEntity):

def __init__(
self,
adguard: AdGuardHome,
data: AdGuardData,
entry: ConfigEntry,
description: AdGuardHomeEntityDescription,
) -> None:
"""Initialize AdGuard Home sensor."""
super().__init__(adguard, entry)
super().__init__(data, entry)
self.entity_description = description
self._attr_unique_id = "_".join(
[
DOMAIN,
adguard.host,
str(adguard.port),
self.adguard.host,
str(self.adguard.port),
"sensor",
description.key,
]
@@ -7,15 +7,15 @@ from dataclasses import dataclass
from datetime import timedelta
from typing import Any

from adguardhome import AdGuardHome, AdGuardHomeConnectionError, AdGuardHomeError
from adguardhome import AdGuardHome, AdGuardHomeError

from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import PlatformNotReady
from homeassistant.helpers.entity_platform import AddEntitiesCallback

from .const import DATA_ADGUARD_CLIENT, DATA_ADGUARD_VERSION, DOMAIN, LOGGER
from . import AdGuardData
from .const import DOMAIN, LOGGER
from .entity import AdGuardHomeEntity

SCAN_INTERVAL = timedelta(seconds=10)

@@ -83,17 +83,10 @@ async def async_setup_entry(
async_add_entities: AddEntitiesCallback,
) -> None:
"""Set up AdGuard Home switch based on a config entry."""
adguard = hass.data[DOMAIN][entry.entry_id][DATA_ADGUARD_CLIENT]

try:
version = await adguard.version()
except AdGuardHomeConnectionError as exception:
raise PlatformNotReady from exception

hass.data[DOMAIN][entry.entry_id][DATA_ADGUARD_VERSION] = version
data: AdGuardData = hass.data[DOMAIN][entry.entry_id]

async_add_entities(
[AdGuardHomeSwitch(adguard, entry, description) for description in SWITCHES],
[AdGuardHomeSwitch(data, entry, description) for description in SWITCHES],
True,
)

@@ -105,15 +98,21 @@ class AdGuardHomeSwitch(AdGuardHomeEntity, SwitchEntity):

def __init__(
self,
adguard: AdGuardHome,
data: AdGuardData,
entry: ConfigEntry,
description: AdGuardHomeSwitchEntityDescription,
) -> None:
"""Initialize AdGuard Home switch."""
super().__init__(adguard, entry)
super().__init__(data, entry)
self.entity_description = description
self._attr_unique_id = "_".join(
[DOMAIN, adguard.host, str(adguard.port), "switch", description.key]
[
DOMAIN,
self.adguard.host,
str(self.adguard.port),
"switch",
description.key,
]
)

async def async_turn_off(self, **kwargs: Any) -> None:
@@ -26,9 +26,7 @@ async def async_get_config_entry_diagnostics(
"""Return diagnostics for a config entry."""
coordinator: AirlyDataUpdateCoordinator = hass.data[DOMAIN][config_entry.entry_id]

diagnostics_data = {
return {
"config_entry": async_redact_data(config_entry.as_dict(), TO_REDACT),
"coordinator_data": coordinator.data,
}

return diagnostics_data
@@ -44,10 +44,8 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:

async def _async_update_method() -> AirthingsDevice:
"""Get data from Airthings BLE."""
ble_device = bluetooth.async_ble_device_from_address(hass, address)

try:
data = await airthings.update_device(ble_device) # type: ignore[arg-type]
data = await airthings.update_device(ble_device)
except Exception as err:
raise UpdateFailed(f"Unable to fetch data: {err}") from err
@@ -87,7 +87,7 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN):
_LOGGER.error(
"Unknown error occurred from %s: %s", discovery_info.address, err
)
raise err
raise
return data

async def async_step_bluetooth(
@@ -1,6 +1,6 @@
{
"config": {
"flow_title": "[%key:component::bluetooth::config::flow_title%]",
"flow_title": "{name}",
"step": {
"user": {
"description": "[%key:component::bluetooth::config::step::user::description%]",
@@ -8,7 +8,7 @@ from typing import Any
from airtouch5py.airtouch5_simple_client import Airtouch5SimpleClient
import voluptuous as vol

from homeassistant import config_entries
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_HOST

from .const import DOMAIN

@@ -18,14 +18,14 @@ _LOGGER = logging.getLogger(__name__)
STEP_USER_DATA_SCHEMA = vol.Schema({vol.Required(CONF_HOST): str})


class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
class AirTouch5ConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle a config flow for Airtouch 5."""

VERSION = 1

async def async_step_user(
self, user_input: dict[str, Any] | None = None
) -> config_entries.ConfigFlowResult:
) -> ConfigFlowResult:
"""Handle the initial step."""
errors: dict[str, str] | None = None
if user_input is not None:
@@ -16,7 +16,9 @@ from .coordinator import AirzoneUpdateCoordinator
PLATFORMS: list[Platform] = [
Platform.BINARY_SENSOR,
Platform.CLIMATE,
Platform.SELECT,
Platform.SENSOR,
Platform.WATER_HEATER,
]
@@ -11,6 +11,7 @@ from aioairzone_cloud.const import (
AZD_AVAILABLE,
AZD_FIRMWARE,
AZD_GROUPS,
AZD_HOT_WATERS,
AZD_INSTALLATIONS,
AZD_NAME,
AZD_SYSTEM_ID,

@@ -136,6 +137,47 @@ class AirzoneGroupEntity(AirzoneEntity):
self.coordinator.async_set_updated_data(self.coordinator.airzone.data())


class AirzoneHotWaterEntity(AirzoneEntity):
"""Define an Airzone Cloud Hot Water entity."""

def __init__(
self,
coordinator: AirzoneUpdateCoordinator,
dhw_id: str,
dhw_data: dict[str, Any],
) -> None:
"""Initialize."""
super().__init__(coordinator)

self.dhw_id = dhw_id

self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, dhw_id)},
manufacturer=MANUFACTURER,
name=dhw_data[AZD_NAME],
via_device=(DOMAIN, dhw_data[AZD_WEBSERVER]),
)

def get_airzone_value(self, key: str) -> Any:
"""Return DHW value by key."""
value = None
if dhw := self.coordinator.data[AZD_HOT_WATERS].get(self.dhw_id):
value = dhw.get(key)
return value

async def _async_update_params(self, params: dict[str, Any]) -> None:
"""Send DHW parameters to Cloud API."""
_LOGGER.debug("dhw=%s: update_params=%s", self.entity_id, params)
try:
await self.coordinator.airzone.api_set_dhw_id_params(self.dhw_id, params)
except AirzoneCloudError as error:
raise HomeAssistantError(
f"Failed to set {self.entity_id} params: {error}"
) from error

self.coordinator.async_set_updated_data(self.coordinator.airzone.data())


class AirzoneInstallationEntity(AirzoneEntity):
"""Define an Airzone Cloud Installation entity."""
@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/airzone_cloud",
"iot_class": "cloud_push",
"loggers": ["aioairzone_cloud"],
"requirements": ["aioairzone-cloud==0.4.7"]
"requirements": ["aioairzone-cloud==0.5.1"]
}
@@ -0,0 +1,124 @@
"""Support for the Airzone Cloud select."""

from __future__ import annotations

from dataclasses import dataclass
from typing import Any, Final

from aioairzone_cloud.common import AirQualityMode
from aioairzone_cloud.const import (
API_AQ_MODE_CONF,
API_VALUE,
AZD_AQ_MODE_CONF,
AZD_ZONES,
)

from homeassistant.components.select import SelectEntity, SelectEntityDescription
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import EntityCategory
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.entity_platform import AddEntitiesCallback

from .const import DOMAIN
from .coordinator import AirzoneUpdateCoordinator
from .entity import AirzoneEntity, AirzoneZoneEntity


@dataclass(frozen=True, kw_only=True)
class AirzoneSelectDescription(SelectEntityDescription):
"""Class to describe an Airzone select entity."""

api_param: str
options_dict: dict[str, str]


AIR_QUALITY_MAP: Final[dict[str, str]] = {
"off": AirQualityMode.OFF,
"on": AirQualityMode.ON,
"auto": AirQualityMode.AUTO,
}


ZONE_SELECT_TYPES: Final[tuple[AirzoneSelectDescription, ...]] = (
AirzoneSelectDescription(
api_param=API_AQ_MODE_CONF,
entity_category=EntityCategory.CONFIG,
key=AZD_AQ_MODE_CONF,
options=list(AIR_QUALITY_MAP),
options_dict=AIR_QUALITY_MAP,
translation_key="air_quality",
),
)


async def async_setup_entry(
hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback
) -> None:
"""Add Airzone Cloud select from a config_entry."""
coordinator: AirzoneUpdateCoordinator = hass.data[DOMAIN][entry.entry_id]

# Zones
async_add_entities(
AirzoneZoneSelect(
coordinator,
description,
zone_id,
zone_data,
)
for description in ZONE_SELECT_TYPES
for zone_id, zone_data in coordinator.data.get(AZD_ZONES, {}).items()
if description.key in zone_data
)


class AirzoneBaseSelect(AirzoneEntity, SelectEntity):
"""Define an Airzone Cloud select."""

entity_description: AirzoneSelectDescription
values_dict: dict[str, str]

@callback
def _handle_coordinator_update(self) -> None:
"""Update attributes when the coordinator updates."""
self._async_update_attrs()
super()._handle_coordinator_update()

def _get_current_option(self) -> str | None:
"""Get current selected option."""
value = self.get_airzone_value(self.entity_description.key)
return self.values_dict.get(value)

@callback
def _async_update_attrs(self) -> None:
"""Update select attributes."""
self._attr_current_option = self._get_current_option()


class AirzoneZoneSelect(AirzoneZoneEntity, AirzoneBaseSelect):
"""Define an Airzone Cloud Zone select."""

def __init__(
self,
coordinator: AirzoneUpdateCoordinator,
description: AirzoneSelectDescription,
zone_id: str,
zone_data: dict[str, Any],
) -> None:
"""Initialize."""
super().__init__(coordinator, zone_id, zone_data)

self._attr_unique_id = f"{zone_id}_{description.key}"
self.entity_description = description
self.values_dict = {v: k for k, v in description.options_dict.items()}

self._async_update_attrs()

async def async_select_option(self, option: str) -> None:
"""Change the selected option."""
param = self.entity_description.api_param
value = self.entity_description.options_dict[option]
params: dict[str, Any] = {}
params[param] = {
API_VALUE: value,
}
await self._async_update_params(params)
@@ -21,6 +21,16 @@
"air_quality_active": {
"name": "Air Quality active"
}
},
"select": {
"air_quality": {
"name": "Air Quality mode",
"state": {
"off": "Off",
"on": "On",
"auto": "Auto"
}
}
}
}
}
@@ -0,0 +1,164 @@
"""Support for the Airzone Cloud water heater."""

from __future__ import annotations

from typing import Any, Final

from aioairzone_cloud.common import HotWaterOperation, TemperatureUnit
from aioairzone_cloud.const import (
API_OPTS,
API_POWER,
API_POWERFUL_MODE,
API_SETPOINT,
API_UNITS,
API_VALUE,
AZD_HOT_WATERS,
AZD_OPERATION,
AZD_OPERATIONS,
AZD_TEMP,
AZD_TEMP_SET,
AZD_TEMP_SET_MAX,
AZD_TEMP_SET_MIN,
)

from homeassistant.components.water_heater import (
STATE_ECO,
STATE_PERFORMANCE,
WaterHeaterEntity,
WaterHeaterEntityFeature,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import ATTR_TEMPERATURE, STATE_OFF, UnitOfTemperature
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.entity_platform import AddEntitiesCallback

from .const import DOMAIN
from .coordinator import AirzoneUpdateCoordinator
from .entity import AirzoneHotWaterEntity

OPERATION_LIB_TO_HASS: Final[dict[HotWaterOperation, str]] = {
HotWaterOperation.Off: STATE_OFF,
HotWaterOperation.On: STATE_ECO,
HotWaterOperation.Powerful: STATE_PERFORMANCE,
}

OPERATION_MODE_TO_DHW_PARAMS: Final[dict[str, dict[str, Any]]] = {
STATE_OFF: {
API_POWER: {
API_VALUE: False,
},
},
STATE_ECO: {
API_POWER: {
API_VALUE: True,
},
API_POWERFUL_MODE: {
API_VALUE: False,
},
},
STATE_PERFORMANCE: {
API_POWER: {
API_VALUE: True,
},
API_POWERFUL_MODE: {
API_VALUE: True,
},
},
}


async def async_setup_entry(
hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback
) -> None:
"""Add Airzone Cloud Water Heater from a config_entry."""
coordinator: AirzoneUpdateCoordinator = hass.data[DOMAIN][entry.entry_id]

async_add_entities(
AirzoneWaterHeater(
coordinator,
dhw_id,
dhw_data,
)
for dhw_id, dhw_data in coordinator.data.get(AZD_HOT_WATERS, {}).items()
)


class AirzoneWaterHeater(AirzoneHotWaterEntity, WaterHeaterEntity):
"""Define an Airzone Cloud Water Heater."""

_attr_name = None
_attr_supported_features = (
WaterHeaterEntityFeature.TARGET_TEMPERATURE
| WaterHeaterEntityFeature.ON_OFF
| WaterHeaterEntityFeature.OPERATION_MODE
)
_attr_temperature_unit = UnitOfTemperature.CELSIUS

def __init__(
self,
coordinator: AirzoneUpdateCoordinator,
dhw_id: str,
dhw_data: dict,
) -> None:
"""Initialize Airzone Cloud Water Heater."""
super().__init__(coordinator, dhw_id, dhw_data)

self._attr_unique_id = dhw_id
self._attr_operation_list = [
OPERATION_LIB_TO_HASS[operation]
for operation in self.get_airzone_value(AZD_OPERATIONS)
]

self._async_update_attrs()

async def async_turn_off(self, **kwargs: Any) -> None:
"""Turn the water heater off."""
params = {
API_POWER: {
API_VALUE: False,
},
}
await self._async_update_params(params)

async def async_turn_on(self, **kwargs: Any) -> None:
"""Turn the water heater on."""
params = {
API_POWER: {
API_VALUE: True,
},
}
await self._async_update_params(params)

async def async_set_operation_mode(self, operation_mode: str) -> None:
"""Set new target operation mode."""
params = OPERATION_MODE_TO_DHW_PARAMS.get(operation_mode, {})
await self._async_update_params(params)

async def async_set_temperature(self, **kwargs: Any) -> None:
"""Set new target temperature."""
params: dict[str, Any] = {}
if ATTR_TEMPERATURE in kwargs:
params[API_SETPOINT] = {
API_VALUE: kwargs[ATTR_TEMPERATURE],
API_OPTS: {
API_UNITS: TemperatureUnit.CELSIUS.value,
},
}
await self._async_update_params(params)

@callback
def _handle_coordinator_update(self) -> None:
"""Update attributes when the coordinator updates."""
self._async_update_attrs()
super()._handle_coordinator_update()

@callback
def _async_update_attrs(self) -> None:
"""Update water heater attributes."""
self._attr_current_temperature = self.get_airzone_value(AZD_TEMP)
self._attr_current_operation = OPERATION_LIB_TO_HASS[
self.get_airzone_value(AZD_OPERATION)
]
self._attr_max_temp = self.get_airzone_value(AZD_TEMP_SET_MAX)
self._attr_min_temp = self.get_airzone_value(AZD_TEMP_SET_MIN)
self._attr_target_temperature = self.get_airzone_value(AZD_TEMP_SET)
@@ -41,8 +41,8 @@ async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> None:
)
try:
await acc.login()
except (ClientError, TimeoutError, Aladdin.ConnectionError) as ex:
raise ex
except (ClientError, TimeoutError, Aladdin.ConnectionError):
raise

except Aladdin.InvalidPasswordError as ex:
raise InvalidAuth from ex
@@ -23,8 +23,6 @@ async def async_get_config_entry_diagnostics(

acc: AladdinConnectClient = hass.data[DOMAIN][config_entry.entry_id]

diagnostics_data = {
return {
"doors": async_redact_data(acc.doors, TO_REDACT),
}

return diagnostics_data
@@ -3,9 +3,9 @@
from __future__ import annotations

from datetime import timedelta
from functools import partial
from functools import cached_property, partial
import logging
from typing import TYPE_CHECKING, Any, Final, final
from typing import Any, Final, final

import voluptuous as vol

@@ -50,11 +50,6 @@ from .const import ( # noqa: F401
CodeFormat,
)

if TYPE_CHECKING:
from functools import cached_property
else:
from homeassistant.backports.functools import cached_property

_LOGGER: Final = logging.getLogger(__name__)

SCAN_INTERVAL: Final = timedelta(seconds=30)
@@ -26,13 +26,12 @@ from homeassistant.const import (
STATE_OFF,
STATE_ON,
)
from homeassistant.core import Event, HassJob, HomeAssistant
from homeassistant.core import Event, EventStateChangedData, HassJob, HomeAssistant
from homeassistant.exceptions import ServiceNotFound
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.entity_component import EntityComponent
from homeassistant.helpers.event import (
EventStateChangedData,
async_track_point_in_time,
async_track_state_change_event,
)
@@ -300,6 +300,10 @@ class Alexa(AlexaCapability):
The API suggests you should explicitly include this interface.

https://developer.amazon.com/docs/device-apis/alexa-interface.html

To compare current supported locales in Home Assistant
with Alexa supported locales, run the following script:
python -m script.alexa_locales
"""

supported_locales = {

@@ -1764,10 +1768,7 @@ class AlexaRangeController(AlexaCapability):
speed_list = self.entity.attributes.get(vacuum.ATTR_FAN_SPEED_LIST)
speed = self.entity.attributes.get(vacuum.ATTR_FAN_SPEED)
if speed_list is not None and speed is not None:
speed_index = next(
(i for i, v in enumerate(speed_list) if v == speed), None
)
return speed_index
return next((i for i, v in enumerate(speed_list) if v == speed), None)

# Valve Position
if self.instance == f"{valve.DOMAIN}.{valve.ATTR_POSITION}":
@@ -13,6 +13,7 @@ from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback
from homeassistant.helpers.storage import Store

from .const import DOMAIN
from .entities import TRANSLATION_TABLE
from .state_report import async_enable_proactive_mode

STORE_AUTHORIZED = "authorized"

@@ -101,6 +102,10 @@ class AbstractConfig(ABC):
"""If an entity should be exposed."""
return False

def generate_alexa_id(self, entity_id: str) -> str:
"""Return the alexa ID for an entity ID."""
return entity_id.replace(".", "#").translate(TRANSLATION_TABLE)

@callback
def async_invalidate_access_token(self) -> None:
"""Invalidate access token."""
@@ -259,11 +259,6 @@ class DisplayCategory:
WEARABLE = "WEARABLE"


def generate_alexa_id(entity_id: str) -> str:
"""Return the alexa ID for an entity ID."""
return entity_id.replace(".", "#").translate(TRANSLATION_TABLE)


class AlexaEntity:
"""An adaptation of an entity, expressed in Alexa's terms.

@@ -298,7 +293,7 @@ class AlexaEntity:

def alexa_id(self) -> str:
"""Return the Alexa API entity id."""
return generate_alexa_id(self.entity.entity_id)
return self.config.generate_alexa_id(self.entity.entity_id)

def display_categories(self) -> list[str] | None:
"""Return a list of display categories."""

@@ -384,10 +379,8 @@ def async_get_entities(
try:
alexa_entity = ENTITY_ADAPTERS[state.domain](hass, config, state)
interfaces = list(alexa_entity.interfaces())
except Exception as exc: # pylint: disable=broad-except
_LOGGER.exception(
"Unable to serialize %s for discovery: %s", state.entity_id, exc
)
except Exception: # pylint: disable=broad-except
_LOGGER.exception("Unable to serialize %s for discovery", state.entity_id)
else:
if not interfaces:
continue
@@ -126,9 +126,9 @@ async def async_api_discovery(
continue
try:
discovered_serialized_entity = alexa_entity.serialize_discovery()
except Exception as exc: # pylint: disable=broad-except
except Exception: # pylint: disable=broad-except
_LOGGER.exception(
"Unable to serialize %s for discovery: %s", alexa_entity.entity_id, exc
"Unable to serialize %s for discovery", alexa_entity.entity_id
)
else:
discovery_endpoints.append(discovered_serialized_entity)
@@ -1,5 +1,6 @@
"""Support for Alexa skill service end point."""

from collections.abc import Callable, Coroutine
import enum
import logging
from typing import Any

@@ -16,7 +17,9 @@ from .const import DOMAIN, SYN_RESOLUTION_MATCH

_LOGGER = logging.getLogger(__name__)

HANDLERS = Registry() # type: ignore[var-annotated]
HANDLERS: Registry[
str, Callable[[HomeAssistant, dict[str, Any]], Coroutine[Any, Any, dict[str, Any]]]
] = Registry()

INTENTS_API_ENDPOINT = "/api/alexa"

@@ -94,8 +97,8 @@ class AlexaIntentsView(http.HomeAssistantView):
)
)

except intent.IntentError as err:
_LOGGER.exception(str(err))
except intent.IntentError:
_LOGGER.exception("Error handling intent")
return self.json(
intent_error_response(hass, message, "Error handling intent.")
)

@@ -129,8 +132,7 @@ async def async_handle_message(
if not (handler := HANDLERS.get(req_type)):
raise UnknownRequest(f"Received unknown request {req_type}")

response: dict[str, Any] = await handler(hass, message)
return response
return await handler(hass, message)


@HANDLERS.register("SessionEndedRequest")
@@ -291,9 +291,9 @@ class AlexaPresetResource(AlexaCapabilityResource):
def __init__(
self,
labels: list[str],
min_value: int | float,
max_value: int | float,
precision: int | float,
min_value: float,
max_value: float,
precision: float,
unit: str | None = None,
) -> None:
"""Initialize an Alexa presetResource."""

@@ -306,7 +306,7 @@ class AlexaPresetResource(AlexaCapabilityResource):
if unit in AlexaGlobalCatalog.__dict__.values():
self._unit_of_measure = unit

def add_preset(self, value: int | float, labels: list[str]) -> None:
def add_preset(self, value: float, labels: list[str]) -> None:
"""Add preset to configuration presets array."""
self._presets.append({"value": value, "labels": labels})

@@ -405,7 +405,7 @@ class AlexaSemantics:
)

def add_states_to_range(
self, states: list[str], min_value: int | float, max_value: int | float
self, states: list[str], min_value: float, max_value: float
) -> None:
"""Add StatesToRange stateMappings."""
self._add_state_mapping(
@@ -13,10 +13,16 @@ from uuid import uuid4
import aiohttp

from homeassistant.components import event
from homeassistant.const import MATCH_ALL, STATE_ON
from homeassistant.core import CALLBACK_TYPE, HomeAssistant, State, callback
from homeassistant.const import EVENT_STATE_CHANGED, STATE_ON
from homeassistant.core import (
CALLBACK_TYPE,
Event,
EventStateChangedData,
HomeAssistant,
State,
callback,
)
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.event import async_track_state_change
from homeassistant.helpers.significant_change import create_checker
import homeassistant.util.dt as dt_util
from homeassistant.util.json import JsonObjectType, json_loads_object

@@ -35,7 +41,7 @@ from .const import (
Cause,
)
from .diagnostics import async_redact_auth_data
from .entities import ENTITY_ADAPTERS, AlexaEntity, generate_alexa_id
from .entities import ENTITY_ADAPTERS, AlexaEntity
from .errors import AlexaInvalidEndpointError, NoTokenAvailable, RequireRelink

if TYPE_CHECKING:

@@ -265,28 +271,35 @@ async def async_enable_proactive_mode(

checker = await create_checker(hass, DOMAIN, extra_significant_check)

async def async_entity_state_listener(
changed_entity: str,
old_state: State | None,
new_state: State | None,
) -> None:
@callback
def _async_entity_state_filter(data: EventStateChangedData) -> bool:
if not hass.is_running:
return
return False

if not new_state:
return
if not (new_state := data["new_state"]):
return False

if new_state.domain not in ENTITY_ADAPTERS:
return
return False

changed_entity = data["entity_id"]
if not smart_home_config.should_expose(changed_entity):
_LOGGER.debug("Not exposing %s because filtered by config", changed_entity)
return
return False

return True

async def _async_entity_state_listener(
event_: Event[EventStateChangedData],
) -> None:
data = event_.data
new_state = data["new_state"]
if TYPE_CHECKING:
assert new_state is not None

alexa_changed_entity: AlexaEntity = ENTITY_ADAPTERS[new_state.domain](
hass, smart_home_config, new_state
)

# Determine how entity should be reported on
should_report = False
should_doorbell = False

@@ -303,6 +316,7 @@ async def async_enable_proactive_mode(
return

if should_doorbell:
old_state = data["old_state"]
if (
new_state.domain == event.DOMAIN
or new_state.state == STATE_ON

@@ -324,7 +338,11 @@ async def async_enable_proactive_mode(
hass, smart_home_config, alexa_changed_entity, alexa_properties
)

return async_track_state_change(hass, MATCH_ALL, async_entity_state_listener)
return hass.bus.async_listen(
EVENT_STATE_CHANGED,
_async_entity_state_listener,
event_filter=_async_entity_state_filter,
)


async def async_send_changereport_message(

@@ -474,7 +492,7 @@ async def async_send_delete_message(
if domain not in ENTITY_ADAPTERS:
continue

endpoints.append({"endpointId": generate_alexa_id(entity_id)})
endpoints.append({"endpointId": config.generate_alexa_id(entity_id)})

payload: dict[str, Any] = {
"endpoints": endpoints,
@@ -5,5 +5,5 @@
"documentation": "https://www.home-assistant.io/integrations/amazon_polly",
"iot_class": "cloud_push",
"loggers": ["boto3", "botocore", "s3transfer"],
"requirements": ["boto3==1.33.13"]
"requirements": ["boto3==1.34.51"]
}
@@ -60,10 +60,6 @@ class AmberElectricConfigFlow(ConfigFlow, domain=DOMAIN):

try:
sites: list[Site] = filter_sites(api.get_sites())
if len(sites) == 0:
self._errors[CONF_API_TOKEN] = "no_site"
return None
return sites
except amberelectric.ApiException as api_exception:
if api_exception.status == 403:
self._errors[CONF_API_TOKEN] = "invalid_api_token"

@@ -71,6 +67,11 @@ class AmberElectricConfigFlow(ConfigFlow, domain=DOMAIN):
self._errors[CONF_API_TOKEN] = "unknown_error"
return None

if len(sites) == 0:
self._errors[CONF_API_TOKEN] = "no_site"
return None
return sites

async def async_step_user(
self, user_input: dict[str, str] | None = None
) -> ConfigFlowResult:
@@ -0,0 +1,35 @@
"""The Ambient Weather Network integration."""

from __future__ import annotations

from aioambient.open_api import OpenAPI

from homeassistant.config_entries import ConfigEntry
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant

from .const import DOMAIN
from .coordinator import AmbientNetworkDataUpdateCoordinator

PLATFORMS: list[Platform] = [Platform.SENSOR]


async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Set up the Ambient Weather Network from a config entry."""

api = OpenAPI()
coordinator = AmbientNetworkDataUpdateCoordinator(hass, api)
await coordinator.async_config_entry_first_refresh()
hass.data.setdefault(DOMAIN, {})[entry.entry_id] = coordinator
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)

return True


async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Unload a config entry."""

if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS):
hass.data[DOMAIN].pop(entry.entry_id)

return unload_ok
@@ -0,0 +1,152 @@
"""Config flow for the Ambient Weather Network integration."""

from __future__ import annotations

from typing import Any

from aioambient import OpenAPI
import voluptuous as vol

from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import (
    CONF_LATITUDE,
    CONF_LOCATION,
    CONF_LONGITUDE,
    CONF_MAC,
    CONF_RADIUS,
    UnitOfLength,
)
from homeassistant.helpers.selector import (
    LocationSelector,
    LocationSelectorConfig,
    SelectOptionDict,
    SelectSelector,
    SelectSelectorConfig,
)
from homeassistant.util.unit_conversion import DistanceConverter

from .const import API_STATION_INDOOR, API_STATION_INFO, API_STATION_MAC_ADDRESS, DOMAIN
from .helper import get_station_name

CONF_USER = "user"
CONF_STATION = "station"

# One mile
CONF_RADIUS_DEFAULT = 1609.34


class AmbientNetworkConfigFlow(ConfigFlow, domain=DOMAIN):
    """Handle the config flow for the Ambient Weather Network integration."""

    VERSION = 1

    def __init__(self) -> None:
        """Construct the config flow."""

        self._longitude = 0.0
        self._latitude = 0.0
        self._radius = 0.0
        self._stations: dict[str, dict[str, Any]] = {}

    async def async_step_user(
        self,
        user_input: dict[str, Any] | None = None,
    ) -> ConfigFlowResult:
        """Handle the initial step to select the location."""

        errors: dict[str, str] | None = None
        if user_input:
            self._latitude = user_input[CONF_LOCATION][CONF_LATITUDE]
            self._longitude = user_input[CONF_LOCATION][CONF_LONGITUDE]
            self._radius = user_input[CONF_LOCATION][CONF_RADIUS]

            client: OpenAPI = OpenAPI()
            self._stations = {
                x[API_STATION_MAC_ADDRESS]: x
                for x in await client.get_devices_by_location(
                    self._latitude,
                    self._longitude,
                    radius=DistanceConverter.convert(
                        self._radius,
                        UnitOfLength.METERS,
                        UnitOfLength.MILES,
                    ),
                )
            }

            # Filter out indoor stations
            self._stations = dict(
                filter(
                    lambda item: not item[1]
                    .get(API_STATION_INFO, {})
                    .get(API_STATION_INDOOR, False),
                    self._stations.items(),
                )
            )

            if self._stations:
                return await self.async_step_station()

            errors = {"base": "no_stations_found"}

        schema: vol.Schema = self.add_suggested_values_to_schema(
            vol.Schema(
                {
                    vol.Required(
                        CONF_LOCATION,
                    ): LocationSelector(LocationSelectorConfig(radius=True)),
                }
            ),
            {
                CONF_LOCATION: {
                    CONF_LATITUDE: self.hass.config.latitude,
                    CONF_LONGITUDE: self.hass.config.longitude,
                    CONF_RADIUS: CONF_RADIUS_DEFAULT,
                }
                if not errors
                else {
                    CONF_LATITUDE: self._latitude,
                    CONF_LONGITUDE: self._longitude,
                    CONF_RADIUS: self._radius,
                }
            },
        )

        return self.async_show_form(
            step_id=CONF_USER, data_schema=schema, errors=errors if errors else {}
        )

    async def async_step_station(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Handle the second step to select the station."""

        if user_input:
            mac_address = user_input[CONF_STATION]
            await self.async_set_unique_id(mac_address)
            self._abort_if_unique_id_configured()
            return self.async_create_entry(
                title=get_station_name(self._stations[mac_address]),
                data={CONF_MAC: mac_address},
            )

        options: list[SelectOptionDict] = [
            SelectOptionDict(
                label=get_station_name(station),
                value=mac_address,
            )
            for mac_address, station in self._stations.items()
        ]

        schema: vol.Schema = vol.Schema(
            {
                vol.Required(CONF_STATION): SelectSelector(
                    SelectSelectorConfig(options=options, multiple=False, sort=True),
                )
            }
        )

        return self.async_show_form(
            step_id=CONF_STATION,
            data_schema=schema,
        )
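
For reference, a minimal standalone sketch (not part of the diff) of the radius handling above: the location selector returns a radius in meters, while the Ambient API lookup in the flow expects miles, so the flow converts via DistanceConverter. The helper below reproduces that conversion with plain arithmetic; 1609.344 is the standard meters-per-mile factor, and the diff's CONF_RADIUS_DEFAULT of 1609.34 is simply an approximation of one mile.

METERS_PER_MILE = 1609.344  # standard conversion factor


def meters_to_miles(radius_m: float) -> float:
    """Convert a radius given in meters to miles."""
    return radius_m / METERS_PER_MILE


if __name__ == "__main__":
    # A 1609.34 m default radius is effectively a one-mile search circle.
    print(round(meters_to_miles(1609.34), 6))  # ~0.999998 miles
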
@@ -0,0 +1,16 @@
"""Constants for the Ambient Weather Network integration."""

import logging

DOMAIN = "ambient_network"

API_LAST_DATA = "lastData"
API_STATION_COORDS = "coords"
API_STATION_INDOOR = "indoor"
API_STATION_INFO = "info"
API_STATION_LOCATION = "location"
API_STATION_NAME = "name"
API_STATION_MAC_ADDRESS = "macAddress"
API_STATION_TYPE = "stationtype"

LOGGER = logging.getLogger(__package__)
@@ -0,0 +1,65 @@
"""DataUpdateCoordinator for the Ambient Weather Network integration."""

from __future__ import annotations

from datetime import datetime, timedelta
from typing import Any, cast

from aioambient import OpenAPI
from aioambient.errors import RequestError

from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_MAC
from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

from .const import API_LAST_DATA, DOMAIN, LOGGER
from .helper import get_station_name

SCAN_INTERVAL = timedelta(minutes=5)


class AmbientNetworkDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
    """The Ambient Network Data Update Coordinator."""

    config_entry: ConfigEntry
    station_name: str

    def __init__(self, hass: HomeAssistant, api: OpenAPI) -> None:
        """Initialize the coordinator."""
        super().__init__(hass, LOGGER, name=DOMAIN, update_interval=SCAN_INTERVAL)
        self.api = api

    async def _async_update_data(self) -> dict[str, Any]:
        """Fetch the latest data from the Ambient Network."""

        try:
            response = await self.api.get_device_details(
                self.config_entry.data[CONF_MAC]
            )
        except RequestError as ex:
            raise UpdateFailed("Cannot connect to Ambient Network") from ex

        self.station_name = get_station_name(response)

        if (last_data := response.get(API_LAST_DATA)) is None:
            raise UpdateFailed(
                f"Station '{self.config_entry.title}' did not report any data"
            )

        # Eliminate data if the station hasn't been updated for a while.
        if (created_at := last_data.get("created_at")) is None:
            raise UpdateFailed(
                f"Station '{self.config_entry.title}' did not report a time stamp"
            )

        # Eliminate data that has been generated more than an hour ago. The station is
        # probably offline.
        if int(created_at / 1000) < int(
            (datetime.now() - timedelta(hours=1)).timestamp()
        ):
            raise UpdateFailed(
                f"Station '{self.config_entry.title}' reported stale data"
            )

        return cast(dict[str, Any], last_data)
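
As a standalone illustration (not part of the diff), the stale-data guard in _async_update_data above boils down to comparing a millisecond epoch against "now minus one hour". A sketch of that comparison, assuming created_at is a millisecond timestamp as in the coordinator code:

from datetime import datetime, timedelta


def is_stale(created_at_ms: int, max_age: timedelta = timedelta(hours=1)) -> bool:
    """Return True if a millisecond epoch is older than max_age."""
    cutoff = (datetime.now() - max_age).timestamp()
    return int(created_at_ms / 1000) < int(cutoff)


if __name__ == "__main__":
    fresh = int(datetime.now().timestamp() * 1000)
    old = int((datetime.now() - timedelta(hours=2)).timestamp() * 1000)
    print(is_stale(fresh))  # False
    print(is_stale(old))  # True
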
@@ -0,0 +1,50 @@
"""Base entity class for the Ambient Weather Network integration."""

from __future__ import annotations

from abc import abstractmethod

from homeassistant.core import callback
from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo
from homeassistant.helpers.entity import EntityDescription
from homeassistant.helpers.update_coordinator import CoordinatorEntity

from .const import DOMAIN
from .coordinator import AmbientNetworkDataUpdateCoordinator


class AmbientNetworkEntity(CoordinatorEntity[AmbientNetworkDataUpdateCoordinator]):
    """Entity class for Ambient network devices."""

    _attr_attribution = "Data provided by ambientnetwork.net"
    _attr_has_entity_name = True

    def __init__(
        self,
        coordinator: AmbientNetworkDataUpdateCoordinator,
        description: EntityDescription,
        mac_address: str,
    ) -> None:
        """Initialize the Ambient network entity."""

        super().__init__(coordinator)
        self.entity_description = description
        self._attr_unique_id = f"{mac_address}_{description.key}"
        self._attr_device_info = DeviceInfo(
            entry_type=DeviceEntryType.SERVICE,
            name=coordinator.station_name,
            identifiers={(DOMAIN, mac_address)},
            manufacturer="Ambient Weather",
        )
        self._update_attrs()

    @abstractmethod
    def _update_attrs(self) -> None:
        """Update state attributes."""

    @callback
    def _handle_coordinator_update(self) -> None:
        """Get the latest data and updates the state."""

        self._update_attrs()
        super()._handle_coordinator_update()
@@ -0,0 +1,31 @@
"""Helper class for the Ambient Weather Network integration."""

from __future__ import annotations

from typing import Any

from .const import (
    API_LAST_DATA,
    API_STATION_COORDS,
    API_STATION_INFO,
    API_STATION_LOCATION,
    API_STATION_NAME,
    API_STATION_TYPE,
)


def get_station_name(station: dict[str, Any]) -> str:
    """Pick a station name.

    Station names can be empty, in which case we construct the name from
    the location and device type.
    """
    if name := station.get(API_STATION_INFO, {}).get(API_STATION_NAME):
        return str(name)
    location = (
        station.get(API_STATION_INFO, {})
        .get(API_STATION_COORDS, {})
        .get(API_STATION_LOCATION)
    )
    station_type = station.get(API_LAST_DATA, {}).get(API_STATION_TYPE)
    return f"{location}{'' if location is None or station_type is None else ' '}{station_type}"
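
To illustrate the fallback in get_station_name above (not part of the diff): a named station uses its configured name, while an unnamed one falls back to "<location> <stationtype>". Below is a self-contained re-implementation with the API key strings inlined; the sample payloads and the "WS-2000" station type are made-up values for demonstration only.

from typing import Any


def station_name(station: dict[str, Any]) -> str:
    """Mirror of get_station_name with the API keys inlined."""
    if name := station.get("info", {}).get("name"):
        return str(name)
    location = station.get("info", {}).get("coords", {}).get("location")
    station_type = station.get("lastData", {}).get("stationtype")
    joiner = "" if location is None or station_type is None else " "
    return f"{location}{joiner}{station_type}"


print(station_name({"info": {"name": "Backyard"}}))  # Backyard
print(
    station_name(
        {"info": {"coords": {"location": "Reno"}}, "lastData": {"stationtype": "WS-2000"}}
    )
)  # Reno WS-2000
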
@@ -0,0 +1,21 @@
{
  "entity": {
    "sensor": {
      "last_rain": {
        "default": "mdi:water"
      },
      "lightning_strikes_per_day": {
        "default": "mdi:lightning-bolt"
      },
      "lightning_strikes_per_hour": {
        "default": "mdi:lightning-bolt"
      },
      "lightning_distance": {
        "default": "mdi:lightning-bolt"
      },
      "wind_direction": {
        "default": "mdi:compass-outline"
      }
    }
  }
}
@@ -0,0 +1,11 @@
{
  "domain": "ambient_network",
  "name": "Ambient Weather Network",
  "codeowners": ["@thomaskistler"],
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/ambient_network",
  "integration_type": "service",
  "iot_class": "cloud_polling",
  "loggers": ["aioambient"],
  "requirements": ["aioambient==2024.01.0"]
}
@@ -0,0 +1,315 @@
"""Support for Ambient Weather Network sensors."""

from __future__ import annotations

from datetime import datetime

from homeassistant.components.sensor import (
    SensorDeviceClass,
    SensorEntity,
    SensorEntityDescription,
    SensorStateClass,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
    CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
    CONCENTRATION_PARTS_PER_MILLION,
    CONF_MAC,
    DEGREE,
    PERCENTAGE,
    UnitOfIrradiance,
    UnitOfLength,
    UnitOfPrecipitationDepth,
    UnitOfPressure,
    UnitOfSpeed,
    UnitOfTemperature,
    UnitOfVolumetricFlux,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.util import dt as dt_util

from .const import DOMAIN
from .coordinator import AmbientNetworkDataUpdateCoordinator
from .entity import AmbientNetworkEntity

TYPE_AQI_PM25 = "aqi_pm25"
TYPE_AQI_PM25_24H = "aqi_pm25_24h"
TYPE_BAROMABSIN = "baromabsin"
TYPE_BAROMRELIN = "baromrelin"
TYPE_CO2 = "co2"
TYPE_DAILYRAININ = "dailyrainin"
TYPE_DEWPOINT = "dewPoint"
TYPE_EVENTRAININ = "eventrainin"
TYPE_FEELSLIKE = "feelsLike"
TYPE_HOURLYRAININ = "hourlyrainin"
TYPE_HUMIDITY = "humidity"
TYPE_LASTRAIN = "lastRain"
TYPE_LIGHTNING_DISTANCE = "lightning_distance"
TYPE_LIGHTNING_PER_DAY = "lightning_day"
TYPE_LIGHTNING_PER_HOUR = "lightning_hour"
TYPE_MAXDAILYGUST = "maxdailygust"
TYPE_MONTHLYRAININ = "monthlyrainin"
TYPE_PM25 = "pm25"
TYPE_PM25_24H = "pm25_24h"
TYPE_SOLARRADIATION = "solarradiation"
TYPE_TEMPF = "tempf"
TYPE_UV = "uv"
TYPE_WEEKLYRAININ = "weeklyrainin"
TYPE_WINDDIR = "winddir"
TYPE_WINDGUSTMPH = "windgustmph"
TYPE_WINDSPEEDMPH = "windspeedmph"
TYPE_YEARLYRAININ = "yearlyrainin"


SENSOR_DESCRIPTIONS = (
    SensorEntityDescription(
        key=TYPE_AQI_PM25,
        translation_key="pm25_aqi",
        device_class=SensorDeviceClass.AQI,
        state_class=SensorStateClass.MEASUREMENT,
        suggested_display_precision=0,
    ),
    SensorEntityDescription(
        key=TYPE_AQI_PM25_24H,
        translation_key="pm25_aqi_24h_average",
        device_class=SensorDeviceClass.AQI,
        state_class=SensorStateClass.MEASUREMENT,
        suggested_display_precision=0,
        entity_registry_enabled_default=False,
    ),
    SensorEntityDescription(
        key=TYPE_BAROMABSIN,
        translation_key="absolute_pressure",
        native_unit_of_measurement=UnitOfPressure.INHG,
        device_class=SensorDeviceClass.PRESSURE,
        state_class=SensorStateClass.MEASUREMENT,
        suggested_display_precision=2,
        entity_registry_enabled_default=False,
    ),
    SensorEntityDescription(
        key=TYPE_BAROMRELIN,
        translation_key="relative_pressure",
        native_unit_of_measurement=UnitOfPressure.INHG,
        device_class=SensorDeviceClass.PRESSURE,
        state_class=SensorStateClass.MEASUREMENT,
        suggested_display_precision=2,
    ),
    SensorEntityDescription(
        key=TYPE_CO2,
        native_unit_of_measurement=CONCENTRATION_PARTS_PER_MILLION,
        device_class=SensorDeviceClass.CO2,
        state_class=SensorStateClass.MEASUREMENT,
        suggested_display_precision=2,
        entity_registry_enabled_default=False,
    ),
    SensorEntityDescription(
        key=TYPE_DAILYRAININ,
        translation_key="daily_rain",
        native_unit_of_measurement=UnitOfPrecipitationDepth.INCHES,
        device_class=SensorDeviceClass.PRECIPITATION,
        state_class=SensorStateClass.TOTAL,
        suggested_display_precision=2,
    ),
    SensorEntityDescription(
        key=TYPE_DEWPOINT,
        translation_key="dew_point",
        native_unit_of_measurement=UnitOfTemperature.FAHRENHEIT,
        device_class=SensorDeviceClass.TEMPERATURE,
        state_class=SensorStateClass.MEASUREMENT,
        suggested_display_precision=1,
    ),
    SensorEntityDescription(
        key=TYPE_FEELSLIKE,
        translation_key="feels_like",
        native_unit_of_measurement=UnitOfTemperature.FAHRENHEIT,
        device_class=SensorDeviceClass.TEMPERATURE,
        state_class=SensorStateClass.MEASUREMENT,
        suggested_display_precision=1,
    ),
    SensorEntityDescription(
        key=TYPE_HOURLYRAININ,
        translation_key="hourly_rain",
        native_unit_of_measurement=UnitOfVolumetricFlux.INCHES_PER_HOUR,
        state_class=SensorStateClass.MEASUREMENT,
        device_class=SensorDeviceClass.PRECIPITATION_INTENSITY,
        suggested_display_precision=2,
    ),
    SensorEntityDescription(
        key=TYPE_HUMIDITY,
        native_unit_of_measurement=PERCENTAGE,
        device_class=SensorDeviceClass.HUMIDITY,
        state_class=SensorStateClass.MEASUREMENT,
        suggested_display_precision=1,
    ),
    SensorEntityDescription(
        key=TYPE_LASTRAIN,
        translation_key="last_rain",
        device_class=SensorDeviceClass.TIMESTAMP,
        entity_registry_enabled_default=False,
    ),
    SensorEntityDescription(
        key=TYPE_LIGHTNING_PER_DAY,
        translation_key="lightning_strikes_per_day",
        native_unit_of_measurement="strikes",
        state_class=SensorStateClass.TOTAL,
        entity_registry_enabled_default=False,
    ),
    SensorEntityDescription(
        key=TYPE_LIGHTNING_PER_HOUR,
        translation_key="lightning_strikes_per_hour",
        native_unit_of_measurement="strikes/hour",
        state_class=SensorStateClass.TOTAL,
        entity_registry_enabled_default=False,
    ),
    SensorEntityDescription(
        key=TYPE_LIGHTNING_DISTANCE,
        translation_key="lightning_distance",
        native_unit_of_measurement=UnitOfLength.MILES,
        device_class=SensorDeviceClass.DISTANCE,
        state_class=SensorStateClass.MEASUREMENT,
        suggested_display_precision=2,
        entity_registry_enabled_default=False,
    ),
    SensorEntityDescription(
        key=TYPE_MAXDAILYGUST,
        translation_key="max_daily_gust",
        native_unit_of_measurement=UnitOfSpeed.MILES_PER_HOUR,
        device_class=SensorDeviceClass.WIND_SPEED,
        state_class=SensorStateClass.MEASUREMENT,
        suggested_display_precision=1,
    ),
    SensorEntityDescription(
        key=TYPE_MONTHLYRAININ,
        translation_key="monthly_rain",
        native_unit_of_measurement=UnitOfPrecipitationDepth.INCHES,
        device_class=SensorDeviceClass.PRECIPITATION,
        state_class=SensorStateClass.TOTAL,
        suggested_display_precision=2,
        entity_registry_enabled_default=False,
    ),
    SensorEntityDescription(
        key=TYPE_PM25_24H,
        translation_key="pm25_24h_average",
        native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
        device_class=SensorDeviceClass.PM25,
        suggested_display_precision=1,
        entity_registry_enabled_default=False,
    ),
    SensorEntityDescription(
        key=TYPE_PM25,
        native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
        device_class=SensorDeviceClass.PM25,
        state_class=SensorStateClass.MEASUREMENT,
        suggested_display_precision=1,
        entity_registry_enabled_default=False,
    ),
    SensorEntityDescription(
        key=TYPE_SOLARRADIATION,
        native_unit_of_measurement=UnitOfIrradiance.WATTS_PER_SQUARE_METER,
        device_class=SensorDeviceClass.IRRADIANCE,
        state_class=SensorStateClass.MEASUREMENT,
        suggested_display_precision=1,
        entity_registry_enabled_default=False,
    ),
    SensorEntityDescription(
        key=TYPE_TEMPF,
        native_unit_of_measurement=UnitOfTemperature.FAHRENHEIT,
        device_class=SensorDeviceClass.TEMPERATURE,
        state_class=SensorStateClass.MEASUREMENT,
        suggested_display_precision=1,
    ),
    SensorEntityDescription(
        key=TYPE_UV,
        translation_key="uv_index",
        native_unit_of_measurement="index",
        state_class=SensorStateClass.MEASUREMENT,
        suggested_display_precision=1,
    ),
    SensorEntityDescription(
        key=TYPE_WEEKLYRAININ,
        translation_key="weekly_rain",
        native_unit_of_measurement=UnitOfPrecipitationDepth.INCHES,
        device_class=SensorDeviceClass.PRECIPITATION,
        state_class=SensorStateClass.TOTAL,
        suggested_display_precision=2,
        entity_registry_enabled_default=False,
    ),
    SensorEntityDescription(
        key=TYPE_WINDDIR,
        translation_key="wind_direction",
        native_unit_of_measurement=DEGREE,
        suggested_display_precision=0,
        entity_registry_enabled_default=False,
    ),
    SensorEntityDescription(
        key=TYPE_WINDGUSTMPH,
        translation_key="wind_gust",
        native_unit_of_measurement=UnitOfSpeed.MILES_PER_HOUR,
        device_class=SensorDeviceClass.WIND_SPEED,
        state_class=SensorStateClass.MEASUREMENT,
        suggested_display_precision=1,
    ),
    SensorEntityDescription(
        key=TYPE_WINDSPEEDMPH,
        native_unit_of_measurement=UnitOfSpeed.MILES_PER_HOUR,
        device_class=SensorDeviceClass.WIND_SPEED,
        state_class=SensorStateClass.MEASUREMENT,
        suggested_display_precision=1,
    ),
    SensorEntityDescription(
        key=TYPE_YEARLYRAININ,
        translation_key="yearly_rain",
        native_unit_of_measurement=UnitOfPrecipitationDepth.INCHES,
        device_class=SensorDeviceClass.PRECIPITATION,
        state_class=SensorStateClass.TOTAL,
        suggested_display_precision=2,
        entity_registry_enabled_default=False,
    ),
)


async def async_setup_entry(
    hass: HomeAssistant,
    entry: ConfigEntry,
    async_add_entities: AddEntitiesCallback,
) -> None:
    """Set up the Ambient Network sensor entities."""

    coordinator: AmbientNetworkDataUpdateCoordinator = hass.data[DOMAIN][entry.entry_id]
    if coordinator.config_entry is not None:
        async_add_entities(
            AmbientNetworkSensor(
                coordinator,
                description,
                coordinator.config_entry.data[CONF_MAC],
            )
            for description in SENSOR_DESCRIPTIONS
            if coordinator.data.get(description.key) is not None
        )


class AmbientNetworkSensor(AmbientNetworkEntity, SensorEntity):
    """A sensor implementation for an Ambient Weather Network sensor."""

    def __init__(
        self,
        coordinator: AmbientNetworkDataUpdateCoordinator,
        description: SensorEntityDescription,
        mac_address: str,
    ) -> None:
        """Initialize a sensor object."""

        super().__init__(coordinator, description, mac_address)

    def _update_attrs(self) -> None:
        """Update sensor attributes."""

        value = self.coordinator.data.get(self.entity_description.key)

        # Treatments for special units.
        if value is not None and self.device_class == SensorDeviceClass.TIMESTAMP:
            value = datetime.fromtimestamp(value / 1000, tz=dt_util.DEFAULT_TIME_ZONE)

        self._attr_available = value is not None
        self._attr_native_value = value
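
The only per-sensor conversion in _update_attrs above is for timestamp sensors such as lastRain: the API reports a millisecond epoch, which is turned into an aware datetime. A minimal sketch of that conversion (not part of the diff; it pins the result to UTC, whereas the integration uses Home Assistant's configured time zone via dt_util):

from datetime import UTC, datetime


def ms_epoch_to_datetime(value_ms: float) -> datetime:
    """Convert a millisecond epoch into an aware UTC datetime."""
    return datetime.fromtimestamp(value_ms / 1000, tz=UTC)


print(ms_epoch_to_datetime(1_700_000_000_000))  # 2023-11-14 22:13:20+00:00
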
@@ -0,0 +1,87 @@
{
  "config": {
    "step": {
      "user": {
        "title": "Select region",
        "description": "Choose the region you want to survey in order to locate Ambient personal weather stations."
      },
      "station": {
        "title": "Select station",
        "description": "Select the weather station you want to add to Home Assistant.",
        "data": {
          "station": "Station"
        }
      }
    },
    "error": {
      "no_stations_found": "Did not find any stations in the selected region."
    },
    "abort": {
      "already_configured": "[%key:common::config_flow::abort::already_configured_device%]"
    }
  },
  "entity": {
    "sensor": {
      "pm25_24h_average": {
        "name": "PM2.5 (24 hour average)"
      },
      "pm25_aqi": {
        "name": "PM2.5 AQI"
      },
      "pm25_aqi_24h_average": {
        "name": "PM2.5 AQI (24 hour average)"
      },
      "absolute_pressure": {
        "name": "Absolute pressure"
      },
      "relative_pressure": {
        "name": "Relative pressure"
      },
      "daily_rain": {
        "name": "Daily rain"
      },
      "dew_point": {
        "name": "Dew point"
      },
      "feels_like": {
        "name": "Feels like"
      },
      "hourly_rain": {
        "name": "Hourly rain"
      },
      "last_rain": {
        "name": "Last rain"
      },
      "lightning_strikes_per_day": {
        "name": "Lightning strikes per day"
      },
      "lightning_strikes_per_hour": {
        "name": "Lightning strikes per hour"
      },
      "lightning_distance": {
        "name": "Lightning distance"
      },
      "max_daily_gust": {
        "name": "Max daily gust"
      },
      "monthly_rain": {
        "name": "Monthly rain"
      },
      "uv_index": {
        "name": "UV index"
      },
      "weekly_rain": {
        "name": "Weekly rain"
      },
      "wind_direction": {
        "name": "Wind direction"
      },
      "wind_gust": {
        "name": "Wind gust"
      },
      "yearly_rain": {
        "name": "Yearly rain"
      }
    }
  }
}
@@ -93,7 +93,7 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
    if unload_ok:
        ambient = hass.data[DOMAIN].pop(entry.entry_id)
        hass.async_create_task(ambient.ws_disconnect())
        hass.async_create_task(ambient.ws_disconnect(), eager_start=True)

    return unload_ok
@@ -179,7 +179,8 @@ class AmbientStation:
                self._hass.async_create_task(
                    self._hass.config_entries.async_forward_entry_setups(
                        self._entry, PLATFORMS
                    )
                    ),
                    eager_start=True,
                )
                self._entry_setup_complete = True
            self._ws_reconnect_delay = DEFAULT_SOCKET_MIN_RETRY
@@ -49,7 +49,7 @@ class AmbientWeatherEntity(Entity):
        last_data = self._ambient.stations[self._mac_address][ATTR_LAST_DATA]
        key = self.entity_description.key
        available_key = TYPE_SOLARRADIATION if key == TYPE_SOLARRADIATION_LX else key
        self._attr_available = last_data[available_key] is not None
        self._attr_available = last_data.get(available_key) is not None
        self.update_from_latest_data()
        self.async_write_ha_state()
@@ -2,7 +2,7 @@

from __future__ import annotations

from datetime import datetime
from datetime import UTC, datetime

from homeassistant.components.sensor import (
    SensorDeviceClass,
@@ -19,6 +19,7 @@ from homeassistant.const import (
    LIGHT_LUX,
    PERCENTAGE,
    UnitOfIrradiance,
    UnitOfLength,
    UnitOfPrecipitationDepth,
    UnitOfPressure,
    UnitOfSpeed,
@@ -61,6 +62,8 @@ TYPE_HUMIDITYIN = "humidityin"
TYPE_LASTRAIN = "lastRain"
TYPE_LIGHTNING_PER_DAY = "lightning_day"
TYPE_LIGHTNING_PER_HOUR = "lightning_hour"
TYPE_LASTLIGHTNING_DISTANCE = "lightning_distance"
TYPE_LASTLIGHTNING = "lightning_time"
TYPE_MAXDAILYGUST = "maxdailygust"
TYPE_MONTHLYRAININ = "monthlyrainin"
TYPE_PM25 = "pm25"
@@ -296,6 +299,18 @@ SENSOR_DESCRIPTIONS = (
        native_unit_of_measurement="strikes",
        state_class=SensorStateClass.TOTAL,
    ),
    SensorEntityDescription(
        key=TYPE_LASTLIGHTNING,
        translation_key="last_lightning_strike",
        device_class=SensorDeviceClass.TIMESTAMP,
    ),
    SensorEntityDescription(
        key=TYPE_LASTLIGHTNING_DISTANCE,
        translation_key="last_lightning_strike_distance",
        native_unit_of_measurement=UnitOfLength.MILES,
        device_class=SensorDeviceClass.DISTANCE,
        state_class=SensorStateClass.MEASUREMENT,
    ),
    SensorEntityDescription(
        key=TYPE_MAXDAILYGUST,
        translation_key="max_gust",
@@ -685,5 +700,9 @@ class AmbientWeatherSensor(AmbientWeatherEntity, SensorEntity):
            raw = self._ambient.stations[self._mac_address][ATTR_LAST_DATA][key]
            if key == TYPE_LASTRAIN:
                self._attr_native_value = datetime.strptime(raw, "%Y-%m-%dT%H:%M:%S.%f%z")
            elif key == TYPE_LASTLIGHTNING:
                self._attr_native_value = datetime.fromtimestamp(
                    raw / 1000, tz=UTC
                )  # Ambient uses millisecond epoch
            else:
                self._attr_native_value = raw
@@ -219,6 +219,12 @@
      "last_rain": {
        "name": "Last rain"
      },
      "last_lightning_strike": {
        "name": "Last Lightning strike"
      },
      "last_lightning_strike_distance": {
        "name": "Last Lightning strike distance"
      },
      "lightning_strikes_per_day": {
        "name": "Lightning strikes per day"
      },
@@ -203,8 +203,7 @@ class AmcrestChecker(ApiWrapper):
    async def async_command(self, *args: Any, **kwargs: Any) -> httpx.Response:
        """amcrest.ApiWrapper.command wrapper to catch errors."""
        async with self._async_command_wrapper():
            ret = await super().async_command(*args, **kwargs)
        return ret
            return await super().async_command(*args, **kwargs)

    @asynccontextmanager
    async def async_stream_command(
@@ -49,9 +49,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    await coordinator.async_config_entry_first_refresh()

    hass.data.setdefault(DOMAIN, {})[entry.entry_id] = AnalyticsInsightsData(
        coordinator=coordinator, names=names
    )
    hass.data[DOMAIN] = AnalyticsInsightsData(coordinator=coordinator, names=names)

    await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
    entry.async_on_unload(entry.add_update_listener(update_listener))
@@ -62,7 +60,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Unload a config entry."""
    if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS):
        hass.data[DOMAIN].pop(entry.entry_id)
        hass.data.pop(DOMAIN)

    return unload_ok
@@ -53,7 +53,6 @@ class HomeassistantAnalyticsConfigFlow(ConfigFlow, domain=DOMAIN):
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Handle the initial step."""
        self._async_abort_entries_match()
        errors: dict[str, str] = {}
        if user_input is not None:
            if not user_input.get(CONF_TRACKED_INTEGRATIONS) and not user_input.get(
@@ -7,5 +7,6 @@
  "integration_type": "service",
  "iot_class": "cloud_polling",
  "loggers": ["python_homeassistant_analytics"],
  "requirements": ["python-homeassistant-analytics==0.6.0"]
  "requirements": ["python-homeassistant-analytics==0.6.0"],
  "single_config_entry": true
}
@@ -65,7 +65,7 @@ async def async_setup_entry(
) -> None:
    """Initialize the entries."""

    analytics_data: AnalyticsInsightsData = hass.data[DOMAIN][entry.entry_id]
    analytics_data: AnalyticsInsightsData = hass.data[DOMAIN]
    coordinator: HomeassistantAnalyticsDataUpdateCoordinator = (
        analytics_data.coordinator
    )
@@ -13,8 +13,7 @@
    }
  },
  "abort": {
    "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
    "already_configured": "[%key:common::config_flow::abort::already_configured_service%]"
    "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]"
  },
  "error": {
    "no_integration_selected": "You must select at least one integration to track"
@@ -19,10 +19,9 @@ from homeassistant.const import (
    STATE_UNAVAILABLE,
    STATE_UNKNOWN,
)
from homeassistant.core import Event, HomeAssistant
from homeassistant.core import Event, EventStateChangedData, HomeAssistant
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entityfilter import FILTER_SCHEMA, EntityFilter
from homeassistant.helpers.event import EventStateChangedData
from homeassistant.helpers.typing import ConfigType
from homeassistant.util import ssl as ssl_util
@@ -17,6 +17,8 @@ from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import DOMAIN
from .coordinator import APCUPSdCoordinator

PARALLEL_UPDATES = 0

_LOGGER = logging.getLogger(__name__)
_DESCRIPTION = BinarySensorEntityDescription(
    key="statflag",
@@ -28,6 +28,8 @@ from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import DOMAIN
from .coordinator import APCUPSdCoordinator

PARALLEL_UPDATES = 0

_LOGGER = logging.getLogger(__name__)

SENSORS: dict[str, SensorEntityDescription] = {
@@ -37,7 +37,7 @@ from homeassistant.const import (
    URL_API_TEMPLATE,
)
import homeassistant.core as ha
from homeassistant.core import Event, HomeAssistant
from homeassistant.core import Event, EventStateChangedData, HomeAssistant
from homeassistant.exceptions import (
    InvalidEntityFormatError,
    InvalidStateError,
@@ -46,10 +46,10 @@ from homeassistant.exceptions import (
    Unauthorized,
)
from homeassistant.helpers import config_validation as cv, template
from homeassistant.helpers.event import EventStateChangedData
from homeassistant.helpers.json import json_dumps, json_fragment
from homeassistant.helpers.service import async_get_all_descriptions
from homeassistant.helpers.typing import ConfigType
from homeassistant.util.event_type import EventType
from homeassistant.util.json import json_loads

_LOGGER = logging.getLogger(__name__)
@@ -135,7 +135,7 @@ class APIEventStream(HomeAssistantView):
        stop_obj = object()
        to_write: asyncio.Queue[object | str] = asyncio.Queue()

        restrict: list[str] | None = None
        restrict: list[EventType[Any] | str] | None = None
        if restrict_str := request.query.get("restrict"):
            restrict = [*restrict_str.split(","), EVENT_HOMEASSISTANT_STOP]
@@ -284,7 +284,8 @@ class APIEntityStateView(HomeAssistantView):
        # Read the state back for our response
        status_code = HTTPStatus.CREATED if is_new_state else HTTPStatus.OK
        assert (state := hass.states.get(entity_id))
        state = hass.states.get(entity_id)
        assert state
        resp = self.json(state.as_dict(), status_code)

        resp.headers.add("Location", f"/api/states/{entity_id}")
@@ -398,7 +399,6 @@ class APIDomainServicesView(HomeAssistantView):
        cancel_listen = hass.bus.async_listen(
            EVENT_STATE_CHANGED,
            _async_save_changed_entities,
            run_immediately=True,
        )

        try:
@@ -102,9 +102,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
        await manager.disconnect()

    entry.async_on_unload(
        hass.bus.async_listen_once(
            EVENT_HOMEASSISTANT_STOP, on_hass_stop, run_immediately=True
        )
        hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, on_hass_stop)
    )

    await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
@@ -76,9 +76,7 @@ async def device_scan(
            return None
        try:
            ip_address(identifier)
            return [identifier]
        except ValueError:
            return None
        return [identifier]

    # If we have an address, only probe that address to avoid
    # broadcast traffic on the network
@@ -380,9 +380,7 @@ class AppleTVConfigFlow(ConfigFlow, domain=DOMAIN):
                },
            )
            if entry.source != SOURCE_IGNORE:
                self.hass.async_create_task(
                    self.hass.config_entries.async_reload(entry.entry_id)
                )
                self.hass.config_entries.async_schedule_reload(entry.entry_id)
            if not allow_exist:
                raise DeviceAlreadyConfigured
@@ -5,10 +5,14 @@ from collections.abc import Iterable
import logging
from typing import Any

from pyatv.const import InputAction

from homeassistant.components.remote import (
    ATTR_DELAY_SECS,
    ATTR_HOLD_SECS,
    ATTR_NUM_REPEATS,
    DEFAULT_DELAY_SECS,
    DEFAULT_HOLD_SECS,
    RemoteEntity,
)
from homeassistant.config_entries import ConfigEntry
@@ -29,7 +33,6 @@ COMMAND_TO_ATTRIBUTE = {
    "turn_off": ("power", "turn_off"),
    "volume_up": ("audio", "volume_up"),
    "volume_down": ("audio", "volume_down"),
    "home_hold": ("remote_control", "home"),
}
@@ -66,6 +69,7 @@ class AppleTVRemote(AppleTVEntity, RemoteEntity):
        """Send a command to one device."""
        num_repeats = kwargs[ATTR_NUM_REPEATS]
        delay = kwargs.get(ATTR_DELAY_SECS, DEFAULT_DELAY_SECS)
        hold_secs = kwargs.get(ATTR_HOLD_SECS, DEFAULT_HOLD_SECS)

        if not self.atv:
            _LOGGER.error("Unable to send commands, not connected to %s", self.name)
@@ -84,5 +88,10 @@ class AppleTVRemote(AppleTVEntity, RemoteEntity):
                    raise ValueError("Command not found. Exiting sequence")

                _LOGGER.info("Sending command %s", single_command)
                await attr_value()

                if hold_secs >= 1:
                    await attr_value(action=InputAction.Hold)
                else:
                    await attr_value()

                await asyncio.sleep(delay)
@@ -107,9 +107,7 @@ class AprilaireClimate(BaseAprilaireEntity, ClimateEntity):
            features = features | ClimateEntityFeature.PRESET_MODE

        features = features | ClimateEntityFeature.FAN_MODE

        return features
        return features | ClimateEntityFeature.FAN_MODE

    @property
    def current_humidity(self) -> int | None:
@@ -8,7 +8,7 @@ from typing import Any
from pyaprilaire.const import Attribute
import voluptuous as vol

from homeassistant import config_entries
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_HOST, CONF_PORT
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.device_registry import format_mac
@@ -26,14 +26,14 @@ STEP_USER_DATA_SCHEMA = vol.Schema(
_LOGGER = logging.getLogger(__name__)


class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
class AprilaireConfigFlow(ConfigFlow, domain=DOMAIN):
    """Handle a config flow for Aprilaire."""

    VERSION = 1

    async def async_step_user(
        self, user_input: dict[str, Any] | None = None
    ) -> config_entries.ConfigFlowResult:
    ) -> ConfigFlowResult:
        """Handle the initial step."""

        if user_input is None:
@@ -4,7 +4,7 @@ from __future__ import annotations

from collections.abc import Awaitable, Callable
import logging
from typing import Any, Optional
from typing import Any

import pyaprilaire.client
from pyaprilaire.const import MODELS, Attribute, FunctionalDomain
@@ -155,7 +155,7 @@ class AprilaireCoordinator(BaseDataUpdateCoordinatorProtocol):
        return self.create_device_name(self.data)

    def create_device_name(self, data: Optional[dict[str, Any]]) -> str:
    def create_device_name(self, data: dict[str, Any] | None) -> str:
        """Create the name of the thermostat."""

        name = data.get(Attribute.NAME) if data else None
@@ -5,5 +5,5 @@
  "documentation": "https://www.home-assistant.io/integrations/aprs",
  "iot_class": "cloud_push",
  "loggers": ["aprslib", "geographiclib", "geopy"],
  "requirements": ["aprslib==0.7.0", "geopy==2.3.0"]
  "requirements": ["aprslib==0.7.2", "geopy==2.3.0"]
}
@@ -11,7 +11,7 @@
      "description": "[%key:component::bluetooth::config::step::bluetooth_confirm::description%]"
    }
  },
  "flow_title": "[%key:component::bluetooth::config::flow_title%]",
  "flow_title": "{name}",
  "error": {
    "unknown": "[%key:common::config_flow::error::unknown%]"
  },
@@ -257,7 +257,7 @@ class ArcamFmj(MediaPlayerEntity):
            for preset in presets.values()
        ]

        root = BrowseMedia(
        return BrowseMedia(
            title="Arcam FMJ Receiver",
            media_class=MediaClass.DIRECTORY,
            media_content_id="root",
@@ -267,8 +267,6 @@ class ArcamFmj(MediaPlayerEntity):
            children=radio,
        )

        return root

    @convert_exception
    async def async_play_media(
        self, media_type: MediaType | str, media_id: str, **kwargs: Any
@@ -2,6 +2,7 @@

from __future__ import annotations

from aiohttp.client_exceptions import ClientResponseError
from arris_tg2492lg import ConnectBox, Device
import voluptuous as vol
@@ -12,6 +13,7 @@ from homeassistant.components.device_tracker import (
)
from homeassistant.const import CONF_HOST, CONF_PASSWORD
from homeassistant.core import HomeAssistant
from homeassistant.helpers.aiohttp_client import async_get_clientsession
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.typing import ConfigType
@@ -25,12 +27,21 @@ PLATFORM_SCHEMA = PARENT_PLATFORM_SCHEMA.extend(
)


def get_scanner(hass: HomeAssistant, config: ConfigType) -> ArrisDeviceScanner:
    """Return the Arris device scanner."""
async def async_get_scanner(
    hass: HomeAssistant, config: ConfigType
) -> ArrisDeviceScanner | None:
    """Return the Arris device scanner if successful."""
    conf = config[DOMAIN]
    url = f"http://{conf[CONF_HOST]}"
    connect_box = ConnectBox(url, conf[CONF_PASSWORD])
    return ArrisDeviceScanner(connect_box)
    websession = async_get_clientsession(hass)
    connect_box = ConnectBox(websession, url, conf[CONF_PASSWORD])

    try:
        await connect_box.async_login()

        return ArrisDeviceScanner(connect_box)
    except ClientResponseError:
        return None


class ArrisDeviceScanner(DeviceScanner):
@@ -41,23 +52,22 @@ class ArrisDeviceScanner(DeviceScanner):
        self.connect_box = connect_box
        self.last_results: list[Device] = []

    def scan_devices(self) -> list[str]:
    async def async_scan_devices(self) -> list[str]:
        """Scan for new devices and return a list with found device IDs."""
        self._update_info()
        await self._async_update_info()

        return [device.mac for device in self.last_results if device.mac]

    def get_device_name(self, device: str) -> str | None:
    async def async_get_device_name(self, device: str) -> str | None:
        """Return the name of the given device or None if we don't know."""
        name = next(
        return next(
            (result.hostname for result in self.last_results if result.mac == device),
            None,
        )
        return name

    def _update_info(self) -> None:
    async def _async_update_info(self) -> None:
        """Ensure the information from the Arris TG2492LG router is up to date."""
        result = self.connect_box.get_connected_devices()
        result = await self.connect_box.async_get_connected_devices()

        last_results: list[Device] = []
        mac_addresses: set[str | None] = set()
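
The arris_tg2492lg hunks above convert the device scanner from the synchronous get_scanner/scan_devices shape to the async variants. As a rough, self-contained sketch of that pattern (not part of the diff, and using a toy connect-box stub rather than the real arris_tg2492lg API):

import asyncio
from dataclasses import dataclass


@dataclass
class FakeDevice:
    """Stand-in for a router-reported client."""

    mac: str
    hostname: str | None


class FakeConnectBox:
    """Toy stub that mimics the async calls used in the diff."""

    async def async_login(self) -> None:
        return None

    async def async_get_connected_devices(self) -> list[FakeDevice]:
        return [FakeDevice("aa:bb:cc:dd:ee:ff", "laptop")]


class AsyncScanner:
    """Minimal async scanner following the shape of ArrisDeviceScanner."""

    def __init__(self, connect_box: FakeConnectBox) -> None:
        self.connect_box = connect_box
        self.last_results: list[FakeDevice] = []

    async def async_scan_devices(self) -> list[str]:
        self.last_results = await self.connect_box.async_get_connected_devices()
        return [device.mac for device in self.last_results if device.mac]

    async def async_get_device_name(self, device: str) -> str | None:
        return next(
            (r.hostname for r in self.last_results if r.mac == device), None
        )


async def main() -> None:
    box = FakeConnectBox()
    await box.async_login()
    scanner = AsyncScanner(box)
    macs = await scanner.async_scan_devices()
    print(macs, await scanner.async_get_device_name(macs[0]))


asyncio.run(main())
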
Some files were not shown because too many files have changed in this diff.