pull/94998/head 2023.10.0
Franck Nijhof 2023-10-04 16:03:55 +02:00 committed by GitHub
commit 22bf1a0582
1698 changed files with 103369 additions and 29687 deletions


@ -29,11 +29,13 @@ omit =
homeassistant/components/adguard/switch.py
homeassistant/components/ads/*
homeassistant/components/aemet/weather_update_coordinator.py
homeassistant/components/aftership/*
homeassistant/components/aftership/__init__.py
homeassistant/components/aftership/sensor.py
homeassistant/components/agent_dvr/alarm_control_panel.py
homeassistant/components/agent_dvr/camera.py
homeassistant/components/agent_dvr/helpers.py
homeassistant/components/airnow/__init__.py
homeassistant/components/airnow/coordinator.py
homeassistant/components/airnow/sensor.py
homeassistant/components/airq/__init__.py
homeassistant/components/airq/coordinator.py
@ -44,6 +46,7 @@ omit =
homeassistant/components/airthings_ble/sensor.py
homeassistant/components/airtouch4/__init__.py
homeassistant/components/airtouch4/climate.py
homeassistant/components/airtouch4/coordinator.py
homeassistant/components/airvisual/__init__.py
homeassistant/components/airvisual/sensor.py
homeassistant/components/airvisual_pro/__init__.py
@ -100,6 +103,7 @@ omit =
homeassistant/components/azure_devops/__init__.py
homeassistant/components/azure_devops/sensor.py
homeassistant/components/azure_service_bus/*
homeassistant/components/awair/coordinator.py
homeassistant/components/baf/__init__.py
homeassistant/components/baf/climate.py
homeassistant/components/baf/entity.py
@ -171,6 +175,7 @@ omit =
homeassistant/components/comed_hourly_pricing/sensor.py
homeassistant/components/comelit/__init__.py
homeassistant/components/comelit/const.py
homeassistant/components/comelit/cover.py
homeassistant/components/comelit/coordinator.py
homeassistant/components/comelit/light.py
homeassistant/components/comfoconnect/fan.py
@ -179,6 +184,7 @@ omit =
homeassistant/components/control4/__init__.py
homeassistant/components/control4/director_utils.py
homeassistant/components/control4/light.py
homeassistant/components/coolmaster/coordinator.py
homeassistant/components/cppm_tracker/device_tracker.py
homeassistant/components/crownstone/__init__.py
homeassistant/components/crownstone/devices.py
@ -242,6 +248,8 @@ omit =
homeassistant/components/duotecno/switch.py
homeassistant/components/duotecno/cover.py
homeassistant/components/duotecno/light.py
homeassistant/components/duotecno/climate.py
homeassistant/components/duotecno/binary_sensor.py
homeassistant/components/dwd_weather_warnings/const.py
homeassistant/components/dwd_weather_warnings/coordinator.py
homeassistant/components/dwd_weather_warnings/sensor.py
@ -255,6 +263,12 @@ omit =
homeassistant/components/ecobee/notify.py
homeassistant/components/ecobee/sensor.py
homeassistant/components/ecobee/weather.py
homeassistant/components/ecoforest/__init__.py
homeassistant/components/ecoforest/coordinator.py
homeassistant/components/ecoforest/entity.py
homeassistant/components/ecoforest/number.py
homeassistant/components/ecoforest/sensor.py
homeassistant/components/ecoforest/switch.py
homeassistant/components/econet/__init__.py
homeassistant/components/econet/binary_sensor.py
homeassistant/components/econet/climate.py
@ -276,7 +290,6 @@ omit =
homeassistant/components/electric_kiwi/__init__.py
homeassistant/components/electric_kiwi/api.py
homeassistant/components/electric_kiwi/oauth2.py
homeassistant/components/electric_kiwi/sensor.py
homeassistant/components/electric_kiwi/coordinator.py
homeassistant/components/electric_kiwi/select.py
homeassistant/components/eliqonline/sensor.py
@ -355,6 +368,7 @@ omit =
homeassistant/components/ezviz/update.py
homeassistant/components/faa_delays/__init__.py
homeassistant/components/faa_delays/binary_sensor.py
homeassistant/components/faa_delays/coordinator.py
homeassistant/components/familyhub/camera.py
homeassistant/components/fastdotcom/*
homeassistant/components/ffmpeg/camera.py
@ -379,7 +393,6 @@ omit =
homeassistant/components/firmata/pin.py
homeassistant/components/firmata/sensor.py
homeassistant/components/firmata/switch.py
homeassistant/components/fitbit/*
homeassistant/components/fivem/__init__.py
homeassistant/components/fivem/binary_sensor.py
homeassistant/components/fivem/coordinator.py
@ -528,7 +541,12 @@ omit =
homeassistant/components/hvv_departures/__init__.py
homeassistant/components/hvv_departures/binary_sensor.py
homeassistant/components/hvv_departures/sensor.py
homeassistant/components/hydrawise/*
homeassistant/components/hydrawise/__init__.py
homeassistant/components/hydrawise/binary_sensor.py
homeassistant/components/hydrawise/const.py
homeassistant/components/hydrawise/coordinator.py
homeassistant/components/hydrawise/sensor.py
homeassistant/components/hydrawise/switch.py
homeassistant/components/ialarm/alarm_control_panel.py
homeassistant/components/iammeter/sensor.py
homeassistant/components/iaqualink/binary_sensor.py
@ -654,6 +672,7 @@ omit =
homeassistant/components/lg_soundbar/__init__.py
homeassistant/components/lg_soundbar/media_player.py
homeassistant/components/life360/__init__.py
homeassistant/components/life360/button.py
homeassistant/components/life360/coordinator.py
homeassistant/components/life360/device_tracker.py
homeassistant/components/lightwave/*
@ -703,11 +722,13 @@ omit =
homeassistant/components/mailgun/notify.py
homeassistant/components/map/*
homeassistant/components/mastodon/notify.py
homeassistant/components/matrix/*
homeassistant/components/matrix/__init__.py
homeassistant/components/matrix/notify.py
homeassistant/components/matter/__init__.py
homeassistant/components/meater/__init__.py
homeassistant/components/meater/sensor.py
homeassistant/components/media_extractor/*
homeassistant/components/medcom_ble/__init__.py
homeassistant/components/medcom_ble/sensor.py
homeassistant/components/mediaroom/media_player.py
homeassistant/components/melcloud/__init__.py
homeassistant/components/melcloud/climate.py
@ -731,6 +752,7 @@ omit =
homeassistant/components/mill/sensor.py
homeassistant/components/minecraft_server/__init__.py
homeassistant/components/minecraft_server/binary_sensor.py
homeassistant/components/minecraft_server/coordinator.py
homeassistant/components/minecraft_server/entity.py
homeassistant/components/minecraft_server/sensor.py
homeassistant/components/minio/minio_helper.py
@ -746,7 +768,9 @@ omit =
homeassistant/components/moehlenhoff_alpha2/climate.py
homeassistant/components/moehlenhoff_alpha2/sensor.py
homeassistant/components/motion_blinds/__init__.py
homeassistant/components/motion_blinds/coordinator.py
homeassistant/components/motion_blinds/cover.py
homeassistant/components/motion_blinds/entity.py
homeassistant/components/motion_blinds/sensor.py
homeassistant/components/mpd/media_player.py
homeassistant/components/mqtt_room/sensor.py
@ -790,6 +814,7 @@ omit =
homeassistant/components/netgear/__init__.py
homeassistant/components/netgear/button.py
homeassistant/components/netgear/device_tracker.py
homeassistant/components/netgear/entity.py
homeassistant/components/netgear/router.py
homeassistant/components/netgear/sensor.py
homeassistant/components/netgear/switch.py
@ -842,6 +867,7 @@ omit =
homeassistant/components/obihai/connectivity.py
homeassistant/components/obihai/sensor.py
homeassistant/components/octoprint/__init__.py
homeassistant/components/octoprint/coordinator.py
homeassistant/components/oem/climate.py
homeassistant/components/ohmconnect/sensor.py
homeassistant/components/ombi/*
@ -872,6 +898,7 @@ omit =
homeassistant/components/opengarage/cover.py
homeassistant/components/opengarage/entity.py
homeassistant/components/opengarage/sensor.py
homeassistant/components/openhardwaremonitor/sensor.py
homeassistant/components/openhome/__init__.py
homeassistant/components/openhome/const.py
homeassistant/components/openhome/media_player.py
@ -953,6 +980,8 @@ omit =
homeassistant/components/point/sensor.py
homeassistant/components/poolsense/__init__.py
homeassistant/components/poolsense/binary_sensor.py
homeassistant/components/poolsense/coordinator.py
homeassistant/components/poolsense/entity.py
homeassistant/components/poolsense/sensor.py
homeassistant/components/powerwall/__init__.py
homeassistant/components/progettihwsw/__init__.py
@ -1003,9 +1032,13 @@ omit =
homeassistant/components/rainmachine/util.py
homeassistant/components/renson/__init__.py
homeassistant/components/renson/const.py
homeassistant/components/renson/coordinator.py
homeassistant/components/renson/entity.py
homeassistant/components/renson/sensor.py
homeassistant/components/renson/button.py
homeassistant/components/renson/fan.py
homeassistant/components/renson/binary_sensor.py
homeassistant/components/renson/number.py
homeassistant/components/raspyrfm/*
homeassistant/components/recollect_waste/sensor.py
homeassistant/components/recorder/repack.py
@ -1066,9 +1099,10 @@ omit =
homeassistant/components/saj/sensor.py
homeassistant/components/satel_integra/*
homeassistant/components/schluter/*
homeassistant/components/screenlogic/__init__.py
homeassistant/components/screenlogic/binary_sensor.py
homeassistant/components/screenlogic/climate.py
homeassistant/components/screenlogic/coordinator.py
homeassistant/components/screenlogic/const.py
homeassistant/components/screenlogic/entity.py
homeassistant/components/screenlogic/light.py
homeassistant/components/screenlogic/number.py
@ -1132,6 +1166,7 @@ omit =
homeassistant/components/smarty/*
homeassistant/components/sms/__init__.py
homeassistant/components/sms/const.py
homeassistant/components/sms/coordinator.py
homeassistant/components/sms/gateway.py
homeassistant/components/sms/notify.py
homeassistant/components/sms/sensor.py
@ -1148,6 +1183,7 @@ omit =
homeassistant/components/solaredge_local/sensor.py
homeassistant/components/solarlog/__init__.py
homeassistant/components/solarlog/sensor.py
homeassistant/components/solarlog/coordinator.py
homeassistant/components/solax/__init__.py
homeassistant/components/solax/sensor.py
homeassistant/components/soma/__init__.py
@ -1240,6 +1276,9 @@ omit =
homeassistant/components/switchbot/sensor.py
homeassistant/components/switchbot/switch.py
homeassistant/components/switchbot/lock.py
homeassistant/components/switchbot_cloud/coordinator.py
homeassistant/components/switchbot_cloud/entity.py
homeassistant/components/switchbot_cloud/switch.py
homeassistant/components/switchmate/switch.py
homeassistant/components/syncthing/__init__.py
homeassistant/components/syncthing/sensor.py
@ -1262,6 +1301,7 @@ omit =
homeassistant/components/system_bridge/__init__.py
homeassistant/components/system_bridge/binary_sensor.py
homeassistant/components/system_bridge/coordinator.py
homeassistant/components/system_bridge/notify.py
homeassistant/components/system_bridge/sensor.py
homeassistant/components/systemmonitor/sensor.py
homeassistant/components/tado/__init__.py
@ -1273,6 +1313,8 @@ omit =
homeassistant/components/tank_utility/sensor.py
homeassistant/components/tankerkoenig/__init__.py
homeassistant/components/tankerkoenig/binary_sensor.py
homeassistant/components/tankerkoenig/coordinator.py
homeassistant/components/tankerkoenig/entity.py
homeassistant/components/tankerkoenig/sensor.py
homeassistant/components/tapsaff/binary_sensor.py
homeassistant/components/tautulli/__init__.py
@ -1441,9 +1483,11 @@ omit =
homeassistant/components/vlc_telnet/__init__.py
homeassistant/components/vlc_telnet/media_player.py
homeassistant/components/vodafone_station/__init__.py
homeassistant/components/vodafone_station/button.py
homeassistant/components/vodafone_station/const.py
homeassistant/components/vodafone_station/coordinator.py
homeassistant/components/vodafone_station/device_tracker.py
homeassistant/components/vodafone_station/sensor.py
homeassistant/components/volkszaehler/sensor.py
homeassistant/components/volumio/__init__.py
homeassistant/components/volumio/browse_media.py
@ -1464,11 +1508,15 @@ omit =
homeassistant/components/watson_tts/tts.py
homeassistant/components/watttime/__init__.py
homeassistant/components/watttime/sensor.py
homeassistant/components/weatherflow/__init__.py
homeassistant/components/weatherflow/const.py
homeassistant/components/weatherflow/sensor.py
homeassistant/components/wiffi/__init__.py
homeassistant/components/wiffi/binary_sensor.py
homeassistant/components/wiffi/sensor.py
homeassistant/components/wiffi/wiffi_strings.py
homeassistant/components/wirelesstag/*
homeassistant/components/withings/api.py
homeassistant/components/wolflink/__init__.py
homeassistant/components/wolflink/sensor.py
homeassistant/components/worldtidesinfo/sensor.py


@ -24,7 +24,7 @@ jobs:
publish: ${{ steps.version.outputs.publish }}
steps:
- name: Checkout the repository
uses: actions/checkout@v3.6.0
uses: actions/checkout@v4.1.0
with:
fetch-depth: 0
@ -56,7 +56,7 @@ jobs:
if: github.repository_owner == 'home-assistant' && needs.init.outputs.publish == 'true'
steps:
- name: Checkout the repository
uses: actions/checkout@v3.6.0
uses: actions/checkout@v4.1.0
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v4.7.0
@ -98,7 +98,7 @@ jobs:
arch: ${{ fromJson(needs.init.outputs.architectures) }}
steps:
- name: Checkout the repository
uses: actions/checkout@v3.6.0
uses: actions/checkout@v4.1.0
- name: Download nightly wheels of frontend
if: needs.init.outputs.channel == 'dev'
@ -190,7 +190,7 @@ jobs:
echo "${{ github.sha }};${{ github.ref }};${{ github.event_name }};${{ github.actor }}" > rootfs/OFFICIAL_IMAGE
- name: Login to GitHub Container Registry
uses: docker/login-action@v2.2.0
uses: docker/login-action@v3.0.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
@ -252,7 +252,7 @@ jobs:
- green
steps:
- name: Checkout the repository
uses: actions/checkout@v3.6.0
uses: actions/checkout@v4.1.0
- name: Set build additional args
run: |
@ -266,7 +266,7 @@ jobs:
fi
- name: Login to GitHub Container Registry
uses: docker/login-action@v2.2.0
uses: docker/login-action@v3.0.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
@ -289,7 +289,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout the repository
uses: actions/checkout@v3.6.0
uses: actions/checkout@v4.1.0
- name: Initialize git
uses: home-assistant/actions/helpers/git-init@master
@ -327,21 +327,21 @@ jobs:
id-token: write
steps:
- name: Checkout the repository
uses: actions/checkout@v3.6.0
uses: actions/checkout@v4.1.0
- name: Install Cosign
uses: sigstore/cosign-installer@v3.1.1
uses: sigstore/cosign-installer@v3.1.2
with:
cosign-release: "v2.0.2"
- name: Login to DockerHub
uses: docker/login-action@v2.2.0
uses: docker/login-action@v3.0.0
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Login to GitHub Container Registry
uses: docker/login-action@v2.2.0
uses: docker/login-action@v3.0.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}


@ -35,8 +35,9 @@ on:
env:
CACHE_VERSION: 5
PIP_CACHE_VERSION: 4
MYPY_CACHE_VERSION: 4
HA_SHORT_VERSION: 2023.9
MYPY_CACHE_VERSION: 5
BLACK_CACHE_VERSION: 1
HA_SHORT_VERSION: "2023.10"
DEFAULT_PYTHON: "3.11"
ALL_PYTHON_VERSIONS: "['3.11']"
# 10.3 is the oldest supported version
@ -55,6 +56,7 @@ env:
POSTGRESQL_VERSIONS: "['postgres:12.14','postgres:15.2']"
PRE_COMMIT_CACHE: ~/.cache/pre-commit
PIP_CACHE: /tmp/pip-cache
BLACK_CACHE: /tmp/black-cache
SQLALCHEMY_WARN_20: 1
PYTHONASYNCIODEBUG: 1
HASS_CI: 1
@ -87,7 +89,7 @@ jobs:
runs-on: ubuntu-22.04
steps:
- name: Check out code from GitHub
uses: actions/checkout@v3.6.0
uses: actions/checkout@v4.1.0
- name: Generate partial Python venv restore key
id: generate_python_cache_key
run: >-
@ -220,7 +222,7 @@ jobs:
- info
steps:
- name: Check out code from GitHub
uses: actions/checkout@v3.6.0
uses: actions/checkout@v4.1.0
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
uses: actions/setup-python@v4.7.0
@ -229,7 +231,7 @@ jobs:
check-latest: true
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache@v3.3.1
uses: actions/cache@v3.3.2
with:
path: venv
key: >-
@ -244,7 +246,7 @@ jobs:
pip install "$(cat requirements_test.txt | grep pre-commit)"
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache@v3.3.1
uses: actions/cache@v3.3.2
with:
path: ${{ env.PRE_COMMIT_CACHE }}
lookup-only: true
@ -265,16 +267,23 @@ jobs:
- pre-commit
steps:
- name: Check out code from GitHub
uses: actions/checkout@v3.6.0
uses: actions/checkout@v4.1.0
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v4.7.0
id: python
with:
python-version: ${{ env.DEFAULT_PYTHON }}
check-latest: true
- name: Generate partial black restore key
id: generate-black-key
run: |
black_version=$(cat requirements_test_pre_commit.txt | grep black | cut -d '=' -f 3)
echo "version=$black_version" >> $GITHUB_OUTPUT
echo "key=black-${{ env.BLACK_CACHE_VERSION }}-$black_version-${{
env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache/restore@v3.3.1
uses: actions/cache/restore@v3.3.2
with:
path: venv
fail-on-cache-miss: true
@ -283,21 +292,36 @@ jobs:
needs.info.outputs.pre-commit_cache_key }}
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache/restore@v3.3.1
uses: actions/cache/restore@v3.3.2
with:
path: ${{ env.PRE_COMMIT_CACHE }}
fail-on-cache-miss: true
key: >-
${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
needs.info.outputs.pre-commit_cache_key }}
- name: Restore black cache
uses: actions/cache@v3.3.2
with:
path: ${{ env.BLACK_CACHE }}
key: >-
${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
steps.generate-black-key.outputs.key }}
restore-keys: |
${{ runner.os }}-${{ steps.python.outputs.python-version }}-black-${{
env.BLACK_CACHE_VERSION }}-${{ steps.generate-black-key.outputs.version }}-${{
env.HA_SHORT_VERSION }}-
- name: Run black (fully)
if: needs.info.outputs.test_full_suite == 'true'
env:
BLACK_CACHE_DIR: ${{ env.BLACK_CACHE }}
run: |
. venv/bin/activate
pre-commit run --hook-stage manual black --all-files --show-diff-on-failure
- name: Run black (partially)
if: needs.info.outputs.test_full_suite == 'false'
shell: bash
env:
BLACK_CACHE_DIR: ${{ env.BLACK_CACHE }}
run: |
. venv/bin/activate
shopt -s globstar
@ -311,7 +335,7 @@ jobs:
- pre-commit
steps:
- name: Check out code from GitHub
uses: actions/checkout@v3.6.0
uses: actions/checkout@v4.1.0
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v4.7.0
id: python
@ -320,7 +344,7 @@ jobs:
check-latest: true
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache/restore@v3.3.1
uses: actions/cache/restore@v3.3.2
with:
path: venv
fail-on-cache-miss: true
@ -329,7 +353,7 @@ jobs:
needs.info.outputs.pre-commit_cache_key }}
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache/restore@v3.3.1
uses: actions/cache/restore@v3.3.2
with:
path: ${{ env.PRE_COMMIT_CACHE }}
fail-on-cache-miss: true
@ -360,7 +384,7 @@ jobs:
- pre-commit
steps:
- name: Check out code from GitHub
uses: actions/checkout@v3.6.0
uses: actions/checkout@v4.1.0
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v4.7.0
id: python
@ -369,7 +393,7 @@ jobs:
check-latest: true
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache/restore@v3.3.1
uses: actions/cache/restore@v3.3.2
with:
path: venv
fail-on-cache-miss: true
@ -378,7 +402,7 @@ jobs:
needs.info.outputs.pre-commit_cache_key }}
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache/restore@v3.3.1
uses: actions/cache/restore@v3.3.2
with:
path: ${{ env.PRE_COMMIT_CACHE }}
fail-on-cache-miss: true
@ -454,7 +478,7 @@ jobs:
python-version: ${{ fromJSON(needs.info.outputs.python_versions) }}
steps:
- name: Check out code from GitHub
uses: actions/checkout@v3.6.0
uses: actions/checkout@v4.1.0
- name: Set up Python ${{ matrix.python-version }}
id: python
uses: actions/setup-python@v4.7.0
@ -468,7 +492,7 @@ jobs:
env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache@v3.3.1
uses: actions/cache@v3.3.2
with:
path: venv
lookup-only: true
@ -477,7 +501,7 @@ jobs:
needs.info.outputs.python_cache_key }}
- name: Restore pip wheel cache
if: steps.cache-venv.outputs.cache-hit != 'true'
uses: actions/cache@v3.3.1
uses: actions/cache@v3.3.2
with:
path: ${{ env.PIP_CACHE }}
key: >-
@ -522,7 +546,7 @@ jobs:
- base
steps:
- name: Check out code from GitHub
uses: actions/checkout@v3.6.0
uses: actions/checkout@v4.1.0
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
uses: actions/setup-python@v4.7.0
@ -531,7 +555,7 @@ jobs:
check-latest: true
- name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
id: cache-venv
uses: actions/cache/restore@v3.3.1
uses: actions/cache/restore@v3.3.2
with:
path: venv
fail-on-cache-miss: true
@ -554,7 +578,7 @@ jobs:
- base
steps:
- name: Check out code from GitHub
uses: actions/checkout@v3.6.0
uses: actions/checkout@v4.1.0
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
uses: actions/setup-python@v4.7.0
@ -563,7 +587,7 @@ jobs:
check-latest: true
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache/restore@v3.3.1
uses: actions/cache/restore@v3.3.2
with:
path: venv
fail-on-cache-miss: true
@ -587,7 +611,7 @@ jobs:
- base
steps:
- name: Check out code from GitHub
uses: actions/checkout@v3.6.0
uses: actions/checkout@v4.1.0
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
uses: actions/setup-python@v4.7.0
@ -596,7 +620,7 @@ jobs:
check-latest: true
- name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
id: cache-venv
uses: actions/cache/restore@v3.3.1
uses: actions/cache/restore@v3.3.2
with:
path: venv
fail-on-cache-miss: true
@ -631,7 +655,7 @@ jobs:
- base
steps:
- name: Check out code from GitHub
uses: actions/checkout@v3.6.0
uses: actions/checkout@v4.1.0
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
uses: actions/setup-python@v4.7.0
@ -647,7 +671,7 @@ jobs:
env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
- name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
id: cache-venv
uses: actions/cache/restore@v3.3.1
uses: actions/cache/restore@v3.3.2
with:
path: venv
fail-on-cache-miss: true
@ -655,7 +679,7 @@ jobs:
${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
needs.info.outputs.python_cache_key }}
- name: Restore mypy cache
uses: actions/cache@v3.3.1
uses: actions/cache@v3.3.2
with:
path: .mypy_cache
key: >-
@ -713,7 +737,7 @@ jobs:
bluez \
ffmpeg
- name: Check out code from GitHub
uses: actions/checkout@v3.6.0
uses: actions/checkout@v4.1.0
- name: Set up Python ${{ matrix.python-version }}
id: python
uses: actions/setup-python@v4.7.0
@ -722,7 +746,7 @@ jobs:
check-latest: true
- name: Restore full Python ${{ matrix.python-version }} virtual environment
id: cache-venv
uses: actions/cache/restore@v3.3.1
uses: actions/cache/restore@v3.3.2
with:
path: venv
fail-on-cache-miss: true
@ -865,7 +889,7 @@ jobs:
ffmpeg \
libmariadb-dev-compat
- name: Check out code from GitHub
uses: actions/checkout@v3.6.0
uses: actions/checkout@v4.1.0
- name: Set up Python ${{ matrix.python-version }}
id: python
uses: actions/setup-python@v4.7.0
@ -874,7 +898,7 @@ jobs:
check-latest: true
- name: Restore full Python ${{ matrix.python-version }} virtual environment
id: cache-venv
uses: actions/cache/restore@v3.3.1
uses: actions/cache/restore@v3.3.2
with:
path: venv
fail-on-cache-miss: true
@ -989,7 +1013,7 @@ jobs:
ffmpeg \
postgresql-server-dev-14
- name: Check out code from GitHub
uses: actions/checkout@v3.6.0
uses: actions/checkout@v4.1.0
- name: Set up Python ${{ matrix.python-version }}
id: python
uses: actions/setup-python@v4.7.0
@ -998,7 +1022,7 @@ jobs:
check-latest: true
- name: Restore full Python ${{ matrix.python-version }} virtual environment
id: cache-venv
uses: actions/cache/restore@v3.3.1
uses: actions/cache/restore@v3.3.2
with:
path: venv
fail-on-cache-miss: true
@ -1084,7 +1108,7 @@ jobs:
timeout-minutes: 10
steps:
- name: Check out code from GitHub
uses: actions/checkout@v3.6.0
uses: actions/checkout@v4.1.0
- name: Download all coverage artifacts
uses: actions/download-artifact@v3
- name: Upload coverage to Codecov (full coverage)


@ -42,7 +42,7 @@ jobs:
id: token
# Pinned to a specific version of the action for security reasons
# v1.7.0
uses: tibdex/github-app-token@b62528385c34dbc9f38e5f4225ac829252d1ea92
uses: tibdex/github-app-token@3beb63f4bd073e61482598c45c71c1019b59b73a
with:
app_id: ${{ secrets.ISSUE_TRIAGE_APP_ID }}
private_key: ${{ secrets.ISSUE_TRIAGE_APP_PEM }}


@ -19,7 +19,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout the repository
uses: actions/checkout@v3.6.0
uses: actions/checkout@v4.1.0
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v4.7.0


@ -26,7 +26,7 @@ jobs:
architectures: ${{ steps.info.outputs.architectures }}
steps:
- name: Checkout the repository
uses: actions/checkout@v3.6.0
uses: actions/checkout@v4.1.0
- name: Get information
id: info
@ -56,7 +56,7 @@ jobs:
echo "CI_BUILD=1"
echo "ENABLE_HEADLESS=1"
# Use C-Extension for sqlalchemy
# Use C-Extension for SQLAlchemy
echo "REQUIRE_SQLALCHEMY_CEXT=1"
) > .env_file
@ -84,7 +84,7 @@ jobs:
arch: ${{ fromJson(needs.init.outputs.architectures) }}
steps:
- name: Checkout the repository
uses: actions/checkout@v3.6.0
uses: actions/checkout@v4.1.0
- name: Download env_file
uses: actions/download-artifact@v3
@ -122,7 +122,7 @@ jobs:
arch: ${{ fromJson(needs.init.outputs.architectures) }}
steps:
- name: Checkout the repository
uses: actions/checkout@v3.6.0
uses: actions/checkout@v4.1.0
- name: Download env_file
uses: actions/download-artifact@v3
@ -186,7 +186,7 @@ jobs:
wheels-key: ${{ secrets.WHEELS_KEY }}
env-file: true
apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev"
skip-binary: aiohttp;grpcio;sqlalchemy;protobuf
skip-binary: aiohttp;charset-normalizer;grpcio;SQLAlchemy;protobuf
constraints: "homeassistant/package_constraints.txt"
requirements-diff: "requirements_diff.txt"
requirements: "requirements_all.txtaa"
@ -200,7 +200,7 @@ jobs:
wheels-key: ${{ secrets.WHEELS_KEY }}
env-file: true
apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev"
skip-binary: aiohttp;grpcio;sqlalchemy;protobuf
skip-binary: aiohttp;charset-normalizer;grpcio;SQLAlchemy;protobuf
constraints: "homeassistant/package_constraints.txt"
requirements-diff: "requirements_diff.txt"
requirements: "requirements_all.txtab"
@ -214,7 +214,7 @@ jobs:
wheels-key: ${{ secrets.WHEELS_KEY }}
env-file: true
apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev"
skip-binary: aiohttp;grpcio;sqlalchemy;protobuf
skip-binary: aiohttp;charset-normalizer;grpcio;SQLAlchemy;protobuf
constraints: "homeassistant/package_constraints.txt"
requirements-diff: "requirements_diff.txt"
requirements: "requirements_all.txtac"


@ -1,12 +1,12 @@
repos:
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.0.285
rev: v0.0.289
hooks:
- id: ruff
args:
- --fix
- repo: https://github.com/psf/black-pre-commit-mirror
rev: 23.7.0
rev: 23.9.1
hooks:
- id: black
args:
@ -21,7 +21,7 @@ repos:
- --skip="./.*,*.csv,*.json,*.ambr"
- --quiet-level=2
exclude_types: [csv, json]
exclude: ^tests/fixtures/|homeassistant/generated/
exclude: ^tests/fixtures/|homeassistant/generated/|tests/components/.*/snapshots/
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.4.0
hooks:


@ -88,6 +88,7 @@ homeassistant.components.camera.*
homeassistant.components.canary.*
homeassistant.components.clickatell.*
homeassistant.components.clicksend.*
homeassistant.components.climate.*
homeassistant.components.cloud.*
homeassistant.components.configurator.*
homeassistant.components.cover.*
@ -136,9 +137,11 @@ homeassistant.components.fully_kiosk.*
homeassistant.components.geo_location.*
homeassistant.components.geocaching.*
homeassistant.components.gios.*
homeassistant.components.glances.*
homeassistant.components.goalzero.*
homeassistant.components.google.*
homeassistant.components.google_sheets.*
homeassistant.components.gpsd.*
homeassistant.components.greeneye_monitor.*
homeassistant.components.group.*
homeassistant.components.guardian.*
@ -177,6 +180,7 @@ homeassistant.components.huawei_lte.*
homeassistant.components.hydrawise.*
homeassistant.components.hyperion.*
homeassistant.components.ibeacon.*
homeassistant.components.idasen_desk.*
homeassistant.components.image.*
homeassistant.components.image_processing.*
homeassistant.components.image_upload.*
@ -186,6 +190,7 @@ homeassistant.components.input_select.*
homeassistant.components.integration.*
homeassistant.components.ipp.*
homeassistant.components.iqvia.*
homeassistant.components.islamic_prayer_times.*
homeassistant.components.isy994.*
homeassistant.components.jellyfin.*
homeassistant.components.jewish_calendar.*
@ -209,10 +214,12 @@ homeassistant.components.local_ip.*
homeassistant.components.lock.*
homeassistant.components.logbook.*
homeassistant.components.logger.*
homeassistant.components.london_underground.*
homeassistant.components.lookin.*
homeassistant.components.luftdaten.*
homeassistant.components.mailbox.*
homeassistant.components.mastodon.*
homeassistant.components.matrix.*
homeassistant.components.matter.*
homeassistant.components.media_extractor.*
homeassistant.components.media_player.*
@ -253,7 +260,10 @@ homeassistant.components.peco.*
homeassistant.components.persistent_notification.*
homeassistant.components.pi_hole.*
homeassistant.components.ping.*
homeassistant.components.plugwise.*
homeassistant.components.poolsense.*
homeassistant.components.powerwall.*
homeassistant.components.private_ble_device.*
homeassistant.components.proximity.*
homeassistant.components.prusalink.*
homeassistant.components.pure_energie.*
@ -311,6 +321,7 @@ homeassistant.components.sun.*
homeassistant.components.surepetcare.*
homeassistant.components.switch.*
homeassistant.components.switchbee.*
homeassistant.components.switchbot_cloud.*
homeassistant.components.switcher_kis.*
homeassistant.components.synology_dsm.*
homeassistant.components.systemmonitor.*
@ -332,6 +343,7 @@ homeassistant.components.trafikverket_camera.*
homeassistant.components.trafikverket_ferry.*
homeassistant.components.trafikverket_train.*
homeassistant.components.trafikverket_weatherstation.*
homeassistant.components.trend.*
homeassistant.components.tts.*
homeassistant.components.twentemilieu.*
homeassistant.components.unifi.*


@ -47,8 +47,10 @@ build.json @home-assistant/supervisor
/tests/components/airq/ @Sibgatulin @dl2080
/homeassistant/components/airthings/ @danielhiversen
/tests/components/airthings/ @danielhiversen
/homeassistant/components/airthings_ble/ @vincegio
/tests/components/airthings_ble/ @vincegio
/homeassistant/components/airthings_ble/ @vincegio @LaStrada
/tests/components/airthings_ble/ @vincegio @LaStrada
/homeassistant/components/airtouch4/ @samsinnamon
/tests/components/airtouch4/ @samsinnamon
/homeassistant/components/airvisual/ @bachya
/tests/components/airvisual/ @bachya
/homeassistant/components/airvisual_pro/ @bachya
@ -203,6 +205,8 @@ build.json @home-assistant/supervisor
/tests/components/cloud/ @home-assistant/cloud
/homeassistant/components/cloudflare/ @ludeeus @ctalkington
/tests/components/cloudflare/ @ludeeus @ctalkington
/homeassistant/components/co2signal/ @jpbede
/tests/components/co2signal/ @jpbede
/homeassistant/components/coinbase/ @tombrien
/tests/components/coinbase/ @tombrien
/homeassistant/components/color_extractor/ @GenericStudent
@ -305,6 +309,8 @@ build.json @home-assistant/supervisor
/tests/components/easyenergy/ @klaasnicolaas
/homeassistant/components/ecobee/ @marthoc @marcolivierarsenault
/tests/components/ecobee/ @marthoc @marcolivierarsenault
/homeassistant/components/ecoforest/ @pjanuario
/tests/components/ecoforest/ @pjanuario
/homeassistant/components/econet/ @vangorra @w1ll1am23
/tests/components/econet/ @vangorra @w1ll1am23
/homeassistant/components/ecovacs/ @OverloadUT @mib1185
@ -354,8 +360,8 @@ build.json @home-assistant/supervisor
/homeassistant/components/eq3btsmart/ @rytilahti
/homeassistant/components/escea/ @lazdavila
/tests/components/escea/ @lazdavila
/homeassistant/components/esphome/ @OttoWinter @jesserockz @bdraco
/tests/components/esphome/ @OttoWinter @jesserockz @bdraco
/homeassistant/components/esphome/ @OttoWinter @jesserockz @kbx81 @bdraco
/tests/components/esphome/ @OttoWinter @jesserockz @kbx81 @bdraco
/homeassistant/components/eufylife_ble/ @bdr99
/tests/components/eufylife_ble/ @bdr99
/homeassistant/components/event/ @home-assistant/core
@ -384,6 +390,8 @@ build.json @home-assistant/supervisor
/tests/components/fireservicerota/ @cyberjunky
/homeassistant/components/firmata/ @DaAwesomeP
/tests/components/firmata/ @DaAwesomeP
/homeassistant/components/fitbit/ @allenporter
/tests/components/fitbit/ @allenporter
/homeassistant/components/fivem/ @Sander0542
/tests/components/fivem/ @Sander0542
/homeassistant/components/fjaraskupan/ @elupus
@ -396,8 +404,8 @@ build.json @home-assistant/supervisor
/tests/components/flo/ @dmulcahey
/homeassistant/components/flume/ @ChrisMandich @bdraco @jeeftor
/tests/components/flume/ @ChrisMandich @bdraco @jeeftor
/homeassistant/components/flux_led/ @icemanch @bdraco
/tests/components/flux_led/ @icemanch @bdraco
/homeassistant/components/flux_led/ @icemanch
/tests/components/flux_led/ @icemanch
/homeassistant/components/forecast_solar/ @klaasnicolaas @frenck
/tests/components/forecast_solar/ @klaasnicolaas @frenck
/homeassistant/components/forked_daapd/ @uvjustin
@ -554,6 +562,7 @@ build.json @home-assistant/supervisor
/homeassistant/components/hvv_departures/ @vigonotion
/tests/components/hvv_departures/ @vigonotion
/homeassistant/components/hydrawise/ @dknowles2 @ptcryan
/tests/components/hydrawise/ @dknowles2 @ptcryan
/homeassistant/components/hyperion/ @dermotduffy
/tests/components/hyperion/ @dermotduffy
/homeassistant/components/ialarm/ @RyuzakiKK
@ -565,6 +574,8 @@ build.json @home-assistant/supervisor
/tests/components/ibeacon/ @bdraco
/homeassistant/components/icloud/ @Quentame @nzapponi
/tests/components/icloud/ @Quentame @nzapponi
/homeassistant/components/idasen_desk/ @abmantis
/tests/components/idasen_desk/ @abmantis
/homeassistant/components/ign_sismologia/ @exxamalte
/tests/components/ign_sismologia/ @exxamalte
/homeassistant/components/image/ @home-assistant/core
@ -684,8 +695,6 @@ build.json @home-assistant/supervisor
/tests/components/lidarr/ @tkdrob
/homeassistant/components/life360/ @pnbruckner
/tests/components/life360/ @pnbruckner
/homeassistant/components/lifx/ @bdraco
/tests/components/lifx/ @bdraco
/homeassistant/components/light/ @home-assistant/core
/tests/components/light/ @home-assistant/core
/homeassistant/components/linux_battery/ @fabaff
@ -707,6 +716,8 @@ build.json @home-assistant/supervisor
/tests/components/logger/ @home-assistant/core
/homeassistant/components/logi_circle/ @evanjd
/tests/components/logi_circle/ @evanjd
/homeassistant/components/london_underground/ @jpbede
/tests/components/london_underground/ @jpbede
/homeassistant/components/lookin/ @ANMalko @bdraco
/tests/components/lookin/ @ANMalko @bdraco
/homeassistant/components/loqed/ @mikewoudenberg
@ -723,13 +734,18 @@ build.json @home-assistant/supervisor
/homeassistant/components/lyric/ @timmo001
/tests/components/lyric/ @timmo001
/homeassistant/components/mastodon/ @fabaff
/homeassistant/components/matrix/ @PaarthShah
/tests/components/matrix/ @PaarthShah
/homeassistant/components/matter/ @home-assistant/matter
/tests/components/matter/ @home-assistant/matter
/homeassistant/components/mazda/ @bdr99
/tests/components/mazda/ @bdr99
/homeassistant/components/meater/ @Sotolotl @emontnemery
/tests/components/meater/ @Sotolotl @emontnemery
/homeassistant/components/medcom_ble/ @elafargue
/tests/components/medcom_ble/ @elafargue
/homeassistant/components/media_extractor/ @joostlek
/tests/components/media_extractor/ @joostlek
/homeassistant/components/media_player/ @home-assistant/core
/tests/components/media_player/ @home-assistant/core
/homeassistant/components/media_source/ @hunterjm
@ -766,8 +782,8 @@ build.json @home-assistant/supervisor
/tests/components/moat/ @bdraco
/homeassistant/components/mobile_app/ @home-assistant/core
/tests/components/mobile_app/ @home-assistant/core
/homeassistant/components/modbus/ @adamchengtkc @janiversen @vzahradnik
/tests/components/modbus/ @adamchengtkc @janiversen @vzahradnik
/homeassistant/components/modbus/ @janiversen
/tests/components/modbus/ @janiversen
/homeassistant/components/modem_callerid/ @tkdrob
/tests/components/modem_callerid/ @tkdrob
/homeassistant/components/modern_forms/ @wonderslug
@ -793,8 +809,8 @@ build.json @home-assistant/supervisor
/tests/components/mutesync/ @currentoor
/homeassistant/components/my/ @home-assistant/core
/tests/components/my/ @home-assistant/core
/homeassistant/components/myq/ @ehendrix23
/tests/components/myq/ @ehendrix23
/homeassistant/components/myq/ @ehendrix23 @Lash-L
/tests/components/myq/ @ehendrix23 @Lash-L
/homeassistant/components/mysensors/ @MartinHjelmare @functionpointer
/tests/components/mysensors/ @MartinHjelmare @functionpointer
/homeassistant/components/mystrom/ @fabaff
@ -949,6 +965,8 @@ build.json @home-assistant/supervisor
/tests/components/poolsense/ @haemishkyd
/homeassistant/components/powerwall/ @bdraco @jrester @daniel-simpson
/tests/components/powerwall/ @bdraco @jrester @daniel-simpson
/homeassistant/components/private_ble_device/ @Jc2k
/tests/components/private_ble_device/ @Jc2k
/homeassistant/components/profiler/ @bdraco
/tests/components/profiler/ @bdraco
/homeassistant/components/progettihwsw/ @ardaseremet
@ -1057,8 +1075,8 @@ build.json @home-assistant/supervisor
/tests/components/rss_feed_template/ @home-assistant/core
/homeassistant/components/rtsp_to_webrtc/ @allenporter
/tests/components/rtsp_to_webrtc/ @allenporter
/homeassistant/components/ruckus_unleashed/ @gabe565 @lanrat
/tests/components/ruckus_unleashed/ @gabe565 @lanrat
/homeassistant/components/ruckus_unleashed/ @lanrat @ms264556 @gabe565
/tests/components/ruckus_unleashed/ @lanrat @ms264556 @gabe565
/homeassistant/components/ruuvi_gateway/ @akx
/tests/components/ruuvi_gateway/ @akx
/homeassistant/components/ruuvitag_ble/ @akx
@ -1135,8 +1153,8 @@ build.json @home-assistant/supervisor
/homeassistant/components/sky_hub/ @rogerselwyn
/homeassistant/components/skybell/ @tkdrob
/tests/components/skybell/ @tkdrob
/homeassistant/components/slack/ @tkdrob
/tests/components/slack/ @tkdrob
/homeassistant/components/slack/ @tkdrob @fletcherau
/tests/components/slack/ @tkdrob @fletcherau
/homeassistant/components/sleepiq/ @mfugate1 @kbickar
/tests/components/sleepiq/ @mfugate1 @kbickar
/homeassistant/components/slide/ @ualex73
@ -1184,8 +1202,8 @@ build.json @home-assistant/supervisor
/homeassistant/components/spider/ @peternijssen
/tests/components/spider/ @peternijssen
/homeassistant/components/splunk/ @Bre77
/homeassistant/components/spotify/ @frenck
/tests/components/spotify/ @frenck
/homeassistant/components/spotify/ @frenck @joostlek
/tests/components/spotify/ @frenck @joostlek
/homeassistant/components/sql/ @gjohansson-ST @dougiteixeira
/tests/components/sql/ @gjohansson-ST @dougiteixeira
/homeassistant/components/squeezebox/ @rajlaud
@ -1229,6 +1247,8 @@ build.json @home-assistant/supervisor
/tests/components/switchbee/ @jafar-atili
/homeassistant/components/switchbot/ @danielhiversen @RenierM26 @murtas @Eloston @dsypniewski
/tests/components/switchbot/ @danielhiversen @RenierM26 @murtas @Eloston @dsypniewski
/homeassistant/components/switchbot_cloud/ @SeraphicRav
/tests/components/switchbot_cloud/ @SeraphicRav
/homeassistant/components/switcher_kis/ @thecode
/tests/components/switcher_kis/ @thecode
/homeassistant/components/switchmate/ @danielhiversen @qiz-li
@ -1309,14 +1329,16 @@ build.json @home-assistant/supervisor
/tests/components/trafikverket_weatherstation/ @endor-force @gjohansson-ST
/homeassistant/components/transmission/ @engrbm87 @JPHutchins
/tests/components/transmission/ @engrbm87 @JPHutchins
/homeassistant/components/trend/ @jpbede
/tests/components/trend/ @jpbede
/homeassistant/components/tts/ @home-assistant/core @pvizeli
/tests/components/tts/ @home-assistant/core @pvizeli
/homeassistant/components/tuya/ @Tuya @zlinoliver @frenck
/tests/components/tuya/ @Tuya @zlinoliver @frenck
/homeassistant/components/twentemilieu/ @frenck
/tests/components/twentemilieu/ @frenck
/homeassistant/components/twinkly/ @dr1rrb @Robbie1221
/tests/components/twinkly/ @dr1rrb @Robbie1221
/homeassistant/components/twinkly/ @dr1rrb @Robbie1221 @Olen
/tests/components/twinkly/ @dr1rrb @Robbie1221 @Olen
/homeassistant/components/twitch/ @joostlek
/tests/components/twitch/ @joostlek
/homeassistant/components/ukraine_alarm/ @PaulAnnekov
@ -1352,11 +1374,11 @@ build.json @home-assistant/supervisor
/homeassistant/components/velbus/ @Cereal2nd @brefra
/tests/components/velbus/ @Cereal2nd @brefra
/homeassistant/components/velux/ @Julius2342
/homeassistant/components/venstar/ @garbled1
/tests/components/venstar/ @garbled1
/homeassistant/components/verisure/ @frenck @niro1987
/tests/components/verisure/ @frenck @niro1987
/homeassistant/components/versasense/ @flamm3blemuff1n
/homeassistant/components/venstar/ @garbled1 @jhollowe
/tests/components/venstar/ @garbled1 @jhollowe
/homeassistant/components/verisure/ @frenck
/tests/components/verisure/ @frenck
/homeassistant/components/versasense/ @imstevenxyz
/homeassistant/components/version/ @ludeeus
/tests/components/version/ @ludeeus
/homeassistant/components/vesync/ @markperdue @webdjoe @thegardenmonkey
@ -1384,7 +1406,8 @@ build.json @home-assistant/supervisor
/tests/components/wake_word/ @home-assistant/core @synesthesiam
/homeassistant/components/wallbox/ @hesselonline
/tests/components/wallbox/ @hesselonline
/homeassistant/components/waqi/ @andrey-git
/homeassistant/components/waqi/ @joostlek
/tests/components/waqi/ @joostlek
/homeassistant/components/water_heater/ @home-assistant/core
/tests/components/water_heater/ @home-assistant/core
/homeassistant/components/watson_tts/ @rutkai
@ -1394,6 +1417,10 @@ build.json @home-assistant/supervisor
/tests/components/waze_travel_time/ @eifinger
/homeassistant/components/weather/ @home-assistant/core
/tests/components/weather/ @home-assistant/core
/homeassistant/components/weatherflow/ @natekspencer @jeeftor
/tests/components/weatherflow/ @natekspencer @jeeftor
/homeassistant/components/weatherkit/ @tjhorner
/tests/components/weatherkit/ @tjhorner
/homeassistant/components/webhook/ @home-assistant/core
/tests/components/webhook/ @home-assistant/core
/homeassistant/components/webostv/ @thecode
@ -1411,8 +1438,8 @@ build.json @home-assistant/supervisor
/homeassistant/components/wilight/ @leofig-rj
/tests/components/wilight/ @leofig-rj
/homeassistant/components/wirelesstag/ @sergeymaysak
/homeassistant/components/withings/ @vangorra
/tests/components/withings/ @vangorra
/homeassistant/components/withings/ @vangorra @joostlek
/tests/components/withings/ @vangorra @joostlek
/homeassistant/components/wiz/ @sbidy
/tests/components/wiz/ @sbidy
/homeassistant/components/wled/ @frenck
@ -1446,6 +1473,7 @@ build.json @home-assistant/supervisor
/homeassistant/components/yandex_transport/ @rishatik92 @devbis
/tests/components/yandex_transport/ @rishatik92 @devbis
/homeassistant/components/yardian/ @h3l1o5
/tests/components/yardian/ @h3l1o5
/homeassistant/components/yeelight/ @zewelor @shenxn @starkillerOG @alexyao2015
/tests/components/yeelight/ @zewelor @shenxn @starkillerOG @alexyao2015
/homeassistant/components/yeelightsunflower/ @lindsaymarkward


@ -15,9 +15,8 @@ COPY homeassistant/package_constraints.txt homeassistant/homeassistant/
RUN \
pip3 install \
--no-cache-dir \
--no-index \
--only-binary=:all: \
--find-links "${WHEELS_LINKS}" \
--index-url "https://wheels.home-assistant.io/musllinux-index/" \
-r homeassistant/requirements.txt
COPY requirements_all.txt home_assistant_frontend-* home_assistant_intents-* homeassistant/
@ -39,9 +38,8 @@ RUN \
MALLOC_CONF="background_thread:true,metadata_thp:auto,dirty_decay_ms:20000,muzzy_decay_ms:20000" \
pip3 install \
--no-cache-dir \
--no-index \
--only-binary=:all: \
--find-links "${WHEELS_LINKS}" \
--index-url "https://wheels.home-assistant.io/musllinux-index/" \
-r homeassistant/requirements_all.txt
## Setup Home Assistant Core
@ -49,9 +47,8 @@ COPY . homeassistant/
RUN \
pip3 install \
--no-cache-dir \
--no-index \
--only-binary=:all: \
--find-links "${WHEELS_LINKS}" \
--index-url "https://wheels.home-assistant.io/musllinux-index/" \
-e ./homeassistant \
&& python3 -m compileall \
homeassistant/homeassistant


@ -1,10 +1,10 @@
image: ghcr.io/home-assistant/{arch}-homeassistant
build_from:
aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2023.08.0
armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2023.08.0
armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2023.08.0
amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2023.08.0
i386: ghcr.io/home-assistant/i386-homeassistant-base:2023.08.0
aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2023.09.0
armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2023.09.0
armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2023.09.0
amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2023.09.0
i386: ghcr.io/home-assistant/i386-homeassistant-base:2023.09.0
codenotary:
signer: notary@home-assistant.io
base_image: notary@home-assistant.io


@ -7,6 +7,7 @@
"homekit",
"ibeacon",
"icloud",
"itunes"
"itunes",
"weatherkit"
]
}


@ -1,5 +1,5 @@
{
"domain": "ikea",
"name": "IKEA",
"integrations": ["symfonisk", "tradfri"]
"integrations": ["symfonisk", "tradfri", "idasen_desk"]
}


@ -0,0 +1,5 @@
{
"domain": "switchbot",
"name": "SwitchBot",
"integrations": ["switchbot", "switchbot_cloud"]
}


@ -1,5 +1,5 @@
{
"domain": "u_tec",
"name": "U-tec",
"iot_standards": ["zwave"]
"integrations": ["ultraloq"]
}


@ -125,6 +125,13 @@ class AdvantageAirAC(AdvantageAirAcEntity, ClimateEntity):
@property
def target_temperature(self) -> float | None:
"""Return the current target temperature."""
# If the system is in MyZone mode, and a zone is set, return that temperature instead.
if (
self._ac["myZone"] > 0
and not self._ac.get(ADVANTAGE_AIR_MYAUTO_ENABLED)
and not self._ac.get(ADVANTAGE_AIR_MYTEMP_ENABLED)
):
return self._myzone["setTemp"]
return self._ac["setTemp"]
@property


@ -62,6 +62,12 @@ class AdvantageAirAcEntity(AdvantageAirEntity):
def _ac(self) -> dict[str, Any]:
return self.coordinator.data["aircons"][self.ac_key]["info"]
@property
def _myzone(self) -> dict[str, Any]:
return self.coordinator.data["aircons"][self.ac_key]["zones"].get(
f"z{self._ac['myZone']:02}"
)
class AdvantageAirZoneEntity(AdvantageAirAcEntity):
"""Parent class for Advantage Air Zone Entities."""


@ -30,7 +30,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
longitude = entry.data[CONF_LONGITUDE]
station_updates = entry.options.get(CONF_STATION_UPDATES, True)
options = ConnectionOptions(api_key, station_updates)
options = ConnectionOptions(api_key, station_updates, True)
aemet = AEMET(aiohttp_client.async_get_clientsession(hass), options)
try:
await aemet.select_coordinates(latitude, longitude)


@ -40,7 +40,7 @@ class AemetConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
await self.async_set_unique_id(f"{latitude}-{longitude}")
self._abort_if_unique_id_configured()
options = ConnectionOptions(user_input[CONF_API_KEY], False)
options = ConnectionOptions(user_input[CONF_API_KEY], False, True)
aemet = AEMET(aiohttp_client.async_get_clientsession(self.hass), options)
try:
await aemet.select_coordinates(latitude, longitude)


@ -1,6 +1,19 @@
"""Constant values for the AEMET OpenData component."""
from __future__ import annotations
from aemet_opendata.const import (
AOD_COND_CLEAR_NIGHT,
AOD_COND_CLOUDY,
AOD_COND_FOG,
AOD_COND_LIGHTNING,
AOD_COND_LIGHTNING_RAINY,
AOD_COND_PARTLY_CLODUY,
AOD_COND_POURING,
AOD_COND_RAINY,
AOD_COND_SNOWY,
AOD_COND_SUNNY,
)
from homeassistant.components.weather import (
ATTR_CONDITION_CLEAR_NIGHT,
ATTR_CONDITION_CLOUDY,
@ -55,94 +68,16 @@ ATTR_API_WIND_MAX_SPEED = "wind-max-speed"
ATTR_API_WIND_SPEED = "wind-speed"
CONDITIONS_MAP = {
ATTR_CONDITION_CLEAR_NIGHT: {
"11n", # Despejado (de noche)
},
ATTR_CONDITION_CLOUDY: {
"14", # Nuboso
"14n", # Nuboso (de noche)
"15", # Muy nuboso
"15n", # Muy nuboso (de noche)
"16", # Cubierto
"16n", # Cubierto (de noche)
"17", # Nubes altas
"17n", # Nubes altas (de noche)
},
ATTR_CONDITION_FOG: {
"81", # Niebla
"81n", # Niebla (de noche)
"82", # Bruma - Neblina
"82n", # Bruma - Neblina (de noche)
},
ATTR_CONDITION_LIGHTNING: {
"51", # Intervalos nubosos con tormenta
"51n", # Intervalos nubosos con tormenta (de noche)
"52", # Nuboso con tormenta
"52n", # Nuboso con tormenta (de noche)
"53", # Muy nuboso con tormenta
"53n", # Muy nuboso con tormenta (de noche)
"54", # Cubierto con tormenta
"54n", # Cubierto con tormenta (de noche)
},
ATTR_CONDITION_LIGHTNING_RAINY: {
"61", # Intervalos nubosos con tormenta y lluvia escasa
"61n", # Intervalos nubosos con tormenta y lluvia escasa (de noche)
"62", # Nuboso con tormenta y lluvia escasa
"62n", # Nuboso con tormenta y lluvia escasa (de noche)
"63", # Muy nuboso con tormenta y lluvia escasa
"63n", # Muy nuboso con tormenta y lluvia escasa (de noche)
"64", # Cubierto con tormenta y lluvia escasa
"64n", # Cubierto con tormenta y lluvia escasa (de noche)
},
ATTR_CONDITION_PARTLYCLOUDY: {
"12", # Poco nuboso
"12n", # Poco nuboso (de noche)
"13", # Intervalos nubosos
"13n", # Intervalos nubosos (de noche)
},
ATTR_CONDITION_POURING: {
"27", # Chubascos
"27n", # Chubascos (de noche)
},
ATTR_CONDITION_RAINY: {
"23", # Intervalos nubosos con lluvia
"23n", # Intervalos nubosos con lluvia (de noche)
"24", # Nuboso con lluvia
"24n", # Nuboso con lluvia (de noche)
"25", # Muy nuboso con lluvia
"25n", # Muy nuboso con lluvia (de noche)
"26", # Cubierto con lluvia
"26n", # Cubierto con lluvia (de noche)
"43", # Intervalos nubosos con lluvia escasa
"43n", # Intervalos nubosos con lluvia escasa (de noche)
"44", # Nuboso con lluvia escasa
"44n", # Nuboso con lluvia escasa (de noche)
"45", # Muy nuboso con lluvia escasa
"45n", # Muy nuboso con lluvia escasa (de noche)
"46", # Cubierto con lluvia escasa
"46n", # Cubierto con lluvia escasa (de noche)
},
ATTR_CONDITION_SNOWY: {
"33", # Intervalos nubosos con nieve
"33n", # Intervalos nubosos con nieve (de noche)
"34", # Nuboso con nieve
"34n", # Nuboso con nieve (de noche)
"35", # Muy nuboso con nieve
"35n", # Muy nuboso con nieve (de noche)
"36", # Cubierto con nieve
"36n", # Cubierto con nieve (de noche)
"71", # Intervalos nubosos con nieve escasa
"71n", # Intervalos nubosos con nieve escasa (de noche)
"72", # Nuboso con nieve escasa
"72n", # Nuboso con nieve escasa (de noche)
"73", # Muy nuboso con nieve escasa
"73n", # Muy nuboso con nieve escasa (de noche)
"74", # Cubierto con nieve escasa
"74n", # Cubierto con nieve escasa (de noche)
},
ATTR_CONDITION_SUNNY: {
"11", # Despejado
},
AOD_COND_CLEAR_NIGHT: ATTR_CONDITION_CLEAR_NIGHT,
AOD_COND_CLOUDY: ATTR_CONDITION_CLOUDY,
AOD_COND_FOG: ATTR_CONDITION_FOG,
AOD_COND_LIGHTNING: ATTR_CONDITION_LIGHTNING,
AOD_COND_LIGHTNING_RAINY: ATTR_CONDITION_LIGHTNING_RAINY,
AOD_COND_PARTLY_CLODUY: ATTR_CONDITION_PARTLYCLOUDY,
AOD_COND_POURING: ATTR_CONDITION_POURING,
AOD_COND_RAINY: ATTR_CONDITION_RAINY,
AOD_COND_SNOWY: ATTR_CONDITION_SNOWY,
AOD_COND_SUNNY: ATTR_CONDITION_SUNNY,
}
FORECAST_MONITORED_CONDITIONS = [
@ -187,16 +122,3 @@ FORECAST_MODE_ATTR_API = {
FORECAST_MODE_DAILY: ATTR_API_FORECAST_DAILY,
FORECAST_MODE_HOURLY: ATTR_API_FORECAST_HOURLY,
}
WIND_BEARING_MAP = {
"C": None,
"N": 0.0,
"NE": 45.0,
"E": 90.0,
"SE": 135.0,
"S": 180.0,
"SO": 225.0,
"O": 270.0,
"NO": 315.0,
}


@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/aemet",
"iot_class": "cloud_polling",
"loggers": ["aemet_opendata"],
"requirements": ["AEMET-OpenData==0.4.4"]
"requirements": ["AEMET-OpenData==0.4.5"]
}


@ -30,6 +30,7 @@ from .const import (
ATTR_API_FORECAST_TEMP_LOW,
ATTR_API_FORECAST_TIME,
ATTR_API_FORECAST_WIND_BEARING,
ATTR_API_FORECAST_WIND_MAX_SPEED,
ATTR_API_FORECAST_WIND_SPEED,
ATTR_API_HUMIDITY,
ATTR_API_PRESSURE,
@ -99,6 +100,12 @@ FORECAST_SENSOR_TYPES: tuple[SensorEntityDescription, ...] = (
name="Wind bearing",
native_unit_of_measurement=DEGREE,
),
SensorEntityDescription(
key=ATTR_API_FORECAST_WIND_MAX_SPEED,
name="Wind max speed",
native_unit_of_measurement=UnitOfSpeed.KILOMETERS_PER_HOUR,
device_class=SensorDeviceClass.WIND_SPEED,
),
SensorEntityDescription(
key=ATTR_API_FORECAST_WIND_SPEED,
name="Wind speed",
@ -206,13 +213,14 @@ WEATHER_SENSOR_TYPES: tuple[SensorEntityDescription, ...] = (
name="Wind max speed",
native_unit_of_measurement=UnitOfSpeed.KILOMETERS_PER_HOUR,
device_class=SensorDeviceClass.WIND_SPEED,
state_class=SensorStateClass.MEASUREMENT,
),
SensorEntityDescription(
key=ATTR_API_WIND_SPEED,
name="Wind speed",
native_unit_of_measurement=UnitOfSpeed.KILOMETERS_PER_HOUR,
state_class=SensorStateClass.MEASUREMENT,
device_class=SensorDeviceClass.WIND_SPEED,
state_class=SensorStateClass.MEASUREMENT,
),
)


@ -42,6 +42,7 @@ from .const import (
ATTR_API_PRESSURE,
ATTR_API_TEMPERATURE,
ATTR_API_WIND_BEARING,
ATTR_API_WIND_MAX_SPEED,
ATTR_API_WIND_SPEED,
ATTRIBUTION,
DOMAIN,
@ -193,6 +194,11 @@ class AemetWeather(SingleCoordinatorWeatherEntity[WeatherUpdateCoordinator]):
"""Return the wind bearing."""
return self.coordinator.data[ATTR_API_WIND_BEARING]
@property
def native_wind_gust_speed(self):
"""Return the wind gust speed in native units."""
return self.coordinator.data[ATTR_API_WIND_MAX_SPEED]
@property
def native_wind_speed(self):
"""Return the wind speed."""


@ -34,6 +34,7 @@ from aemet_opendata.const import (
ATTR_DATA,
)
from aemet_opendata.exceptions import AemetError
from aemet_opendata.forecast import ForecastValue
from aemet_opendata.helpers import (
get_forecast_day_value,
get_forecast_hour_value,
@ -78,7 +79,6 @@ from .const import (
ATTR_API_WIND_SPEED,
CONDITIONS_MAP,
DOMAIN,
WIND_BEARING_MAP,
)
_LOGGER = logging.getLogger(__name__)
@ -90,11 +90,8 @@ WEATHER_UPDATE_INTERVAL = timedelta(minutes=10)
def format_condition(condition: str) -> str:
"""Return condition from dict CONDITIONS_MAP."""
for key, value in CONDITIONS_MAP.items():
if condition in value:
return key
_LOGGER.error('Condition "%s" not found in CONDITIONS_MAP', condition)
return condition
val = ForecastValue.parse_condition(condition)
return CONDITIONS_MAP.get(val, val)
def format_float(value) -> float | None:
@ -489,10 +486,7 @@ class WeatherUpdateCoordinator(DataUpdateCoordinator):
val = get_forecast_hour_value(
day_data[AEMET_ATTR_WIND_GUST], hour, key=AEMET_ATTR_DIRECTION
)[0]
if val in WIND_BEARING_MAP:
return WIND_BEARING_MAP[val]
_LOGGER.error("%s not found in Wind Bearing map", val)
return None
return ForecastValue.parse_wind_direction(val)
@staticmethod
def _get_wind_bearing_day(day_data):
@ -500,10 +494,7 @@ class WeatherUpdateCoordinator(DataUpdateCoordinator):
val = get_forecast_day_value(
day_data[AEMET_ATTR_WIND], key=AEMET_ATTR_DIRECTION
)
if val in WIND_BEARING_MAP:
return WIND_BEARING_MAP[val]
_LOGGER.error("%s not found in Wind Bearing map", val)
return None
return ForecastValue.parse_wind_direction(val)
@staticmethod
def _get_wind_max_speed(day_data, hour):


@ -1 +1,42 @@
"""The aftership component."""
"""The AfterShip integration."""
from __future__ import annotations
from pyaftership import AfterShip, AfterShipException
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_API_KEY, Platform
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from .const import DOMAIN
PLATFORMS: list[Platform] = [Platform.SENSOR]
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Set up AfterShip from a config entry."""
hass.data.setdefault(DOMAIN, {})
session = async_get_clientsession(hass)
aftership = AfterShip(api_key=entry.data[CONF_API_KEY], session=session)
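# Verify the API key by fetching the tracking list before forwarding platform setups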
try:
await aftership.trackings.list()
except AfterShipException as err:
raise ConfigEntryNotReady from err
hass.data[DOMAIN][entry.entry_id] = aftership
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
return True
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Unload a config entry."""
if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS):
hass.data[DOMAIN].pop(entry.entry_id)
return unload_ok

View File

@ -0,0 +1,90 @@
"""Config flow for AfterShip integration."""
from __future__ import annotations
import logging
from typing import Any
from pyaftership import AfterShip, AfterShipException
import voluptuous as vol
from homeassistant.config_entries import ConfigFlow
from homeassistant.const import CONF_API_KEY, CONF_NAME
from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN
from homeassistant.data_entry_flow import AbortFlow, FlowResult
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue
from .const import DOMAIN
_LOGGER = logging.getLogger(__name__)
class AfterShipConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle a config flow for AfterShip."""
VERSION = 1
async def async_step_user(
self, user_input: dict[str, Any] | None = None
) -> FlowResult:
"""Handle the initial step."""
errors: dict[str, str] = {}
if user_input is not None:
self._async_abort_entries_match({CONF_API_KEY: user_input[CONF_API_KEY]})
try:
aftership = AfterShip(
api_key=user_input[CONF_API_KEY],
session=async_get_clientsession(self.hass),
)
await aftership.trackings.list()
except AfterShipException:
_LOGGER.exception("Aftership raised exception")
errors["base"] = "cannot_connect"
else:
return self.async_create_entry(title="AfterShip", data=user_input)
return self.async_show_form(
step_id="user",
data_schema=vol.Schema({vol.Required(CONF_API_KEY): str}),
errors=errors,
)
async def async_step_import(self, config: dict[str, Any]) -> FlowResult:
"""Import configuration from yaml."""
try:
self._async_abort_entries_match({CONF_API_KEY: config[CONF_API_KEY]})
except AbortFlow as err:
async_create_issue(
self.hass,
DOMAIN,
"deprecated_yaml_import_issue_already_configured",
breaks_in_ha_version="2024.4.0",
is_fixable=False,
issue_domain=DOMAIN,
severity=IssueSeverity.WARNING,
translation_key="deprecated_yaml_import_issue_already_configured",
translation_placeholders={
"domain": DOMAIN,
"integration_title": "AfterShip",
},
)
raise err
async_create_issue(
self.hass,
HOMEASSISTANT_DOMAIN,
f"deprecated_yaml_{DOMAIN}",
breaks_in_ha_version="2024.4.0",
is_fixable=False,
issue_domain=DOMAIN,
severity=IssueSeverity.WARNING,
translation_key="deprecated_yaml",
translation_placeholders={
"domain": DOMAIN,
"integration_title": "AfterShip",
},
)
return self.async_create_entry(
title=config.get(CONF_NAME, "AfterShip"),
data={CONF_API_KEY: config[CONF_API_KEY]},
)

View File

@ -2,6 +2,7 @@
"domain": "aftership",
"name": "AfterShip",
"codeowners": [],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/aftership",
"iot_class": "cloud_polling",
"requirements": ["pyaftership==21.11.0"]

View File

@ -11,6 +11,7 @@ from homeassistant.components.sensor import (
PLATFORM_SCHEMA as BASE_PLATFORM_SCHEMA,
SensorEntity,
)
from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry
from homeassistant.const import CONF_API_KEY, CONF_NAME
from homeassistant.core import HomeAssistant, ServiceCall
from homeassistant.helpers.aiohttp_client import async_get_clientsession
@ -20,6 +21,7 @@ from homeassistant.helpers.dispatcher import (
async_dispatcher_send,
)
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
from homeassistant.util import Throttle
@ -58,19 +60,43 @@ async def async_setup_platform(
discovery_info: DiscoveryInfoType | None = None,
) -> None:
"""Set up the AfterShip sensor platform."""
apikey = config[CONF_API_KEY]
name = config[CONF_NAME]
session = async_get_clientsession(hass)
aftership = AfterShip(api_key=apikey, session=session)
aftership = AfterShip(
api_key=config[CONF_API_KEY], session=async_get_clientsession(hass)
)
try:
await aftership.trackings.list()
except AfterShipException as err:
_LOGGER.error("No tracking data found. Check API key is correct: %s", err)
return
except AfterShipException:
async_create_issue(
hass,
DOMAIN,
"deprecated_yaml_import_issue_cannot_connect",
breaks_in_ha_version="2024.4.0",
is_fixable=False,
issue_domain=DOMAIN,
severity=IssueSeverity.WARNING,
translation_key="deprecated_yaml_import_issue_cannot_connect",
translation_placeholders={
"integration_title": "AfterShip",
"url": "/config/integrations/dashboard/add?domain=aftership",
},
)
async_add_entities([AfterShipSensor(aftership, name)], True)
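# Hand the YAML configuration over to the import flow so it becomes a config entry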
hass.async_create_task(
hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_IMPORT}, data=config
)
)
async def async_setup_entry(
hass: HomeAssistant,
config_entry: ConfigEntry,
async_add_entities: AddEntitiesCallback,
) -> None:
"""Set up AfterShip sensor entities based on a config entry."""
aftership: AfterShip = hass.data[DOMAIN][config_entry.entry_id]
async_add_entities([AfterShipSensor(aftership, config_entry.title)], True)
async def handle_add_tracking(call: ServiceCall) -> None:
"""Call when a user adds a new Aftership tracking from Home Assistant."""

View File

@ -1,4 +1,19 @@
{
"config": {
"step": {
"user": {
"data": {
"api_key": "[%key:common::config_flow::data::api_key%]"
}
}
},
"error": {
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]"
},
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]"
}
},
"services": {
"add_tracking": {
"name": "Add tracking",
@ -32,5 +47,15 @@
}
}
}
},
"issues": {
"deprecated_yaml_import_issue_already_configured": {
"title": "The {integration_title} YAML configuration import failed",
"description": "Configuring {integration_title} using YAML is being removed but the YAML configuration was already imported.\n\nRemove the YAML configuration and restart Home Assistant."
},
"deprecated_yaml_import_issue_cannot_connect": {
"title": "The {integration_title} YAML configuration import failed",
"description": "Configuring {integration_title} using YAML is being removed but there was an connection error importing your YAML configuration.\n\nEnsure connection to {integration_title} works and restart Home Assistant to try again or remove the {integration_title} YAML configuration from your configuration.yaml file and continue to [set up the integration]({url}) manually."
}
}
}

View File

@ -1,15 +1,8 @@
"""The Airly integration."""
from __future__ import annotations
from asyncio import timeout
from datetime import timedelta
import logging
from math import ceil
from aiohttp import ClientSession
from aiohttp.client_exceptions import ClientConnectorError
from airly import Airly
from airly.exceptions import AirlyError
from homeassistant.components.air_quality import DOMAIN as AIR_QUALITY_PLATFORM
from homeassistant.config_entries import ConfigEntry
@ -17,53 +10,15 @@ from homeassistant.const import CONF_API_KEY, CONF_LATITUDE, CONF_LONGITUDE, Pla
from homeassistant.core import HomeAssistant
from homeassistant.helpers import device_registry as dr, entity_registry as er
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from homeassistant.util import dt as dt_util
from .const import (
ATTR_API_ADVICE,
ATTR_API_CAQI,
ATTR_API_CAQI_DESCRIPTION,
ATTR_API_CAQI_LEVEL,
CONF_USE_NEAREST,
DOMAIN,
MAX_UPDATE_INTERVAL,
MIN_UPDATE_INTERVAL,
NO_AIRLY_SENSORS,
)
from .const import CONF_USE_NEAREST, DOMAIN, MIN_UPDATE_INTERVAL
from .coordinator import AirlyDataUpdateCoordinator
PLATFORMS = [Platform.SENSOR]
_LOGGER = logging.getLogger(__name__)
def set_update_interval(instances_count: int, requests_remaining: int) -> timedelta:
"""Return data update interval.
The number of requests is reset at midnight UTC, so we calculate the update
interval based on the number of minutes until midnight, the number of Airly
instances, and the number of remaining requests.
"""
now = dt_util.utcnow()
midnight = dt_util.find_next_time_expression_time(
now, seconds=[0], minutes=[0], hours=[0]
)
minutes_to_midnight = (midnight - now).total_seconds() / 60
interval = timedelta(
minutes=min(
max(
ceil(minutes_to_midnight / requests_remaining * instances_count),
MIN_UPDATE_INTERVAL,
),
MAX_UPDATE_INTERVAL,
)
)
_LOGGER.debug("Data will be update every %s", interval)
return interval
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Set up Airly as config entry."""
api_key = entry.data[CONF_API_KEY]
@ -131,75 +86,3 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
hass.data[DOMAIN].pop(entry.entry_id)
return unload_ok
class AirlyDataUpdateCoordinator(DataUpdateCoordinator):
"""Define an object to hold Airly data."""
def __init__(
self,
hass: HomeAssistant,
session: ClientSession,
api_key: str,
latitude: float,
longitude: float,
update_interval: timedelta,
use_nearest: bool,
) -> None:
"""Initialize."""
self.latitude = latitude
self.longitude = longitude
# Currently, Airly only supports Polish and English
language = "pl" if hass.config.language == "pl" else "en"
self.airly = Airly(api_key, session, language=language)
self.use_nearest = use_nearest
super().__init__(hass, _LOGGER, name=DOMAIN, update_interval=update_interval)
async def _async_update_data(self) -> dict[str, str | float | int]:
"""Update data via library."""
data: dict[str, str | float | int] = {}
if self.use_nearest:
measurements = self.airly.create_measurements_session_nearest(
self.latitude, self.longitude, max_distance_km=5
)
else:
measurements = self.airly.create_measurements_session_point(
self.latitude, self.longitude
)
async with timeout(20):
try:
await measurements.update()
except (AirlyError, ClientConnectorError) as error:
raise UpdateFailed(error) from error
_LOGGER.debug(
"Requests remaining: %s/%s",
self.airly.requests_remaining,
self.airly.requests_per_day,
)
# Airly API sometimes returns None for requests remaining, so we only update
# update_interval when we have a valid value.
if self.airly.requests_remaining:
self.update_interval = set_update_interval(
len(self.hass.config_entries.async_entries(DOMAIN)),
self.airly.requests_remaining,
)
values = measurements.current["values"]
index = measurements.current["indexes"][0]
standards = measurements.current["standards"]
if index["description"] == NO_AIRLY_SENSORS:
raise UpdateFailed("Can't retrieve data: no Airly sensors in this area")
for value in values:
data[value["name"]] = value["value"]
for standard in standards:
data[f"{standard['pollutant']}_LIMIT"] = standard["limit"]
data[f"{standard['pollutant']}_PERCENT"] = standard["percent"]
data[ATTR_API_CAQI] = index["value"]
data[ATTR_API_CAQI_LEVEL] = index["level"].lower().replace("_", " ")
data[ATTR_API_CAQI_DESCRIPTION] = index["description"]
data[ATTR_API_ADVICE] = index["advice"]
return data

View File

@ -0,0 +1,126 @@
"""DataUpdateCoordinator for the Airly integration."""
from asyncio import timeout
from datetime import timedelta
import logging
from math import ceil
from aiohttp import ClientSession
from aiohttp.client_exceptions import ClientConnectorError
from airly import Airly
from airly.exceptions import AirlyError
from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from homeassistant.util import dt as dt_util
from .const import (
ATTR_API_ADVICE,
ATTR_API_CAQI,
ATTR_API_CAQI_DESCRIPTION,
ATTR_API_CAQI_LEVEL,
DOMAIN,
MAX_UPDATE_INTERVAL,
MIN_UPDATE_INTERVAL,
NO_AIRLY_SENSORS,
)
_LOGGER = logging.getLogger(__name__)
def set_update_interval(instances_count: int, requests_remaining: int) -> timedelta:
"""Return data update interval.
The number of requests is reset at midnight UTC, so we calculate the update
interval based on the number of minutes until midnight, the number of Airly
instances, and the number of remaining requests.
"""
now = dt_util.utcnow()
midnight = dt_util.find_next_time_expression_time(
now, seconds=[0], minutes=[0], hours=[0]
)
minutes_to_midnight = (midnight - now).total_seconds() / 60
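# Spread the remaining request quota over the rest of the UTC day and all Airly
# instances, clamped between MIN_UPDATE_INTERVAL and MAX_UPDATE_INTERVAL minutes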
interval = timedelta(
minutes=min(
max(
ceil(minutes_to_midnight / requests_remaining * instances_count),
MIN_UPDATE_INTERVAL,
),
MAX_UPDATE_INTERVAL,
)
)
_LOGGER.debug("Data will be update every %s", interval)
return interval
class AirlyDataUpdateCoordinator(DataUpdateCoordinator):
"""Define an object to hold Airly data."""
def __init__(
self,
hass: HomeAssistant,
session: ClientSession,
api_key: str,
latitude: float,
longitude: float,
update_interval: timedelta,
use_nearest: bool,
) -> None:
"""Initialize."""
self.latitude = latitude
self.longitude = longitude
# Currently, Airly only supports Polish and English
language = "pl" if hass.config.language == "pl" else "en"
self.airly = Airly(api_key, session, language=language)
self.use_nearest = use_nearest
super().__init__(hass, _LOGGER, name=DOMAIN, update_interval=update_interval)
async def _async_update_data(self) -> dict[str, str | float | int]:
"""Update data via library."""
data: dict[str, str | float | int] = {}
if self.use_nearest:
measurements = self.airly.create_measurements_session_nearest(
self.latitude, self.longitude, max_distance_km=5
)
else:
measurements = self.airly.create_measurements_session_point(
self.latitude, self.longitude
)
async with timeout(20):
try:
await measurements.update()
except (AirlyError, ClientConnectorError) as error:
raise UpdateFailed(error) from error
_LOGGER.debug(
"Requests remaining: %s/%s",
self.airly.requests_remaining,
self.airly.requests_per_day,
)
# Airly API sometimes returns None for requests remaining, so we only update
# update_interval when we have a valid value.
if self.airly.requests_remaining:
self.update_interval = set_update_interval(
len(self.hass.config_entries.async_entries(DOMAIN)),
self.airly.requests_remaining,
)
values = measurements.current["values"]
index = measurements.current["indexes"][0]
standards = measurements.current["standards"]
if index["description"] == NO_AIRLY_SENSORS:
raise UpdateFailed("Can't retrieve data: no Airly sensors in this area")
for value in values:
data[value["name"]] = value["value"]
for standard in standards:
data[f"{standard['pollutant']}_LIMIT"] = standard["limit"]
data[f"{standard['pollutant']}_PERCENT"] = standard["percent"]
data[ATTR_API_CAQI] = index["value"]
data[ATTR_API_CAQI_LEVEL] = index["level"].lower().replace("_", " ")
data[ATTR_API_CAQI_DESCRIPTION] = index["description"]
data[ATTR_API_ADVICE] = index["advice"]
return data

View File

@ -2,11 +2,6 @@
import datetime
import logging
from aiohttp.client_exceptions import ClientConnectorError
from pyairnow import WebServiceAPI
from pyairnow.conv import aqi_to_concentration
from pyairnow.errors import AirNowError
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
CONF_API_KEY,
@ -17,26 +12,9 @@ from homeassistant.const import (
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from .const import (
ATTR_API_AQI,
ATTR_API_AQI_DESCRIPTION,
ATTR_API_AQI_LEVEL,
ATTR_API_AQI_PARAM,
ATTR_API_CAT_DESCRIPTION,
ATTR_API_CAT_LEVEL,
ATTR_API_CATEGORY,
ATTR_API_PM25,
ATTR_API_POLLUTANT,
ATTR_API_REPORT_DATE,
ATTR_API_REPORT_HOUR,
ATTR_API_STATE,
ATTR_API_STATION,
ATTR_API_STATION_LATITUDE,
ATTR_API_STATION_LONGITUDE,
DOMAIN,
)
from .const import DOMAIN
from .coordinator import AirNowDataUpdateCoordinator
_LOGGER = logging.getLogger(__name__)
PLATFORMS = [Platform.SENSOR]
@ -107,72 +85,3 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
async def update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None:
"""Handle options update."""
await hass.config_entries.async_reload(entry.entry_id)
class AirNowDataUpdateCoordinator(DataUpdateCoordinator):
"""Define an object to hold Airly data."""
def __init__(
self, hass, session, api_key, latitude, longitude, distance, update_interval
):
"""Initialize."""
self.latitude = latitude
self.longitude = longitude
self.distance = distance
self.airnow = WebServiceAPI(api_key, session=session)
super().__init__(hass, _LOGGER, name=DOMAIN, update_interval=update_interval)
async def _async_update_data(self):
"""Update data via library."""
data = {}
try:
obs = await self.airnow.observations.latLong(
self.latitude,
self.longitude,
distance=self.distance,
)
except (AirNowError, ClientConnectorError) as error:
raise UpdateFailed(error) from error
if not obs:
raise UpdateFailed("No data was returned from AirNow")
max_aqi = 0
max_aqi_level = 0
max_aqi_desc = ""
max_aqi_poll = ""
for obv in obs:
# Convert AQIs to Concentration
pollutant = obv[ATTR_API_AQI_PARAM]
concentration = aqi_to_concentration(obv[ATTR_API_AQI], pollutant)
data[obv[ATTR_API_AQI_PARAM]] = concentration
# Overall AQI is the max of all pollutant AQIs
if obv[ATTR_API_AQI] > max_aqi:
max_aqi = obv[ATTR_API_AQI]
max_aqi_level = obv[ATTR_API_CATEGORY][ATTR_API_CAT_LEVEL]
max_aqi_desc = obv[ATTR_API_CATEGORY][ATTR_API_CAT_DESCRIPTION]
max_aqi_poll = pollutant
# Copy other data from PM2.5 Value
if obv[ATTR_API_AQI_PARAM] == ATTR_API_PM25:
# Copy Report Details
data[ATTR_API_REPORT_DATE] = obv[ATTR_API_REPORT_DATE]
data[ATTR_API_REPORT_HOUR] = obv[ATTR_API_REPORT_HOUR]
# Copy Station Details
data[ATTR_API_STATE] = obv[ATTR_API_STATE]
data[ATTR_API_STATION] = obv[ATTR_API_STATION]
data[ATTR_API_STATION_LATITUDE] = obv[ATTR_API_STATION_LATITUDE]
data[ATTR_API_STATION_LONGITUDE] = obv[ATTR_API_STATION_LONGITUDE]
# Store Overall AQI
data[ATTR_API_AQI] = max_aqi
data[ATTR_API_AQI_LEVEL] = max_aqi_level
data[ATTR_API_AQI_DESCRIPTION] = max_aqi_desc
data[ATTR_API_POLLUTANT] = max_aqi_poll
return data

View File

@ -0,0 +1,99 @@
"""DataUpdateCoordinator for the AirNow integration."""
import logging
from aiohttp.client_exceptions import ClientConnectorError
from pyairnow import WebServiceAPI
from pyairnow.conv import aqi_to_concentration
from pyairnow.errors import AirNowError
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from .const import (
ATTR_API_AQI,
ATTR_API_AQI_DESCRIPTION,
ATTR_API_AQI_LEVEL,
ATTR_API_AQI_PARAM,
ATTR_API_CAT_DESCRIPTION,
ATTR_API_CAT_LEVEL,
ATTR_API_CATEGORY,
ATTR_API_PM25,
ATTR_API_POLLUTANT,
ATTR_API_REPORT_DATE,
ATTR_API_REPORT_HOUR,
ATTR_API_STATE,
ATTR_API_STATION,
ATTR_API_STATION_LATITUDE,
ATTR_API_STATION_LONGITUDE,
DOMAIN,
)
_LOGGER = logging.getLogger(__name__)
class AirNowDataUpdateCoordinator(DataUpdateCoordinator):
"""The AirNow update coordinator."""
def __init__(
self, hass, session, api_key, latitude, longitude, distance, update_interval
):
"""Initialize."""
self.latitude = latitude
self.longitude = longitude
self.distance = distance
self.airnow = WebServiceAPI(api_key, session=session)
super().__init__(hass, _LOGGER, name=DOMAIN, update_interval=update_interval)
async def _async_update_data(self):
"""Update data via library."""
data = {}
try:
obs = await self.airnow.observations.latLong(
self.latitude,
self.longitude,
distance=self.distance,
)
except (AirNowError, ClientConnectorError) as error:
raise UpdateFailed(error) from error
if not obs:
raise UpdateFailed("No data was returned from AirNow")
max_aqi = 0
max_aqi_level = 0
max_aqi_desc = ""
max_aqi_poll = ""
for obv in obs:
# Convert AQIs to Concentration
pollutant = obv[ATTR_API_AQI_PARAM]
concentration = aqi_to_concentration(obv[ATTR_API_AQI], pollutant)
data[obv[ATTR_API_AQI_PARAM]] = concentration
# Overall AQI is the max of all pollutant AQIs
if obv[ATTR_API_AQI] > max_aqi:
max_aqi = obv[ATTR_API_AQI]
max_aqi_level = obv[ATTR_API_CATEGORY][ATTR_API_CAT_LEVEL]
max_aqi_desc = obv[ATTR_API_CATEGORY][ATTR_API_CAT_DESCRIPTION]
max_aqi_poll = pollutant
# Copy other data from PM2.5 Value
if obv[ATTR_API_AQI_PARAM] == ATTR_API_PM25:
# Copy Report Details
data[ATTR_API_REPORT_DATE] = obv[ATTR_API_REPORT_DATE]
data[ATTR_API_REPORT_HOUR] = obv[ATTR_API_REPORT_HOUR]
# Copy Station Details
data[ATTR_API_STATE] = obv[ATTR_API_STATE]
data[ATTR_API_STATION] = obv[ATTR_API_STATION]
data[ATTR_API_STATION_LATITUDE] = obv[ATTR_API_STATION_LATITUDE]
data[ATTR_API_STATION_LONGITUDE] = obv[ATTR_API_STATION_LONGITUDE]
# Store Overall AQI
data[ATTR_API_AQI] = max_aqi
data[ATTR_API_AQI_LEVEL] = max_aqi_level
data[ATTR_API_AQI_DESCRIPTION] = max_aqi_desc
data[ATTR_API_POLLUTANT] = max_aqi_poll
return data

View File

@ -19,7 +19,7 @@
"service_uuid": "b42e3882-ade7-11e4-89d3-123b93f75cba"
}
],
"codeowners": ["@vincegio"],
"codeowners": ["@vincegio", "@LaStrada"],
"config_flow": true,
"dependencies": ["bluetooth_adapters"],
"documentation": "https://www.home-assistant.io/integrations/airthings_ble",

View File

@ -1,19 +1,13 @@
"""The AirTouch4 integration."""
import logging
from airtouch4pyapi import AirTouch
from airtouch4pyapi.airtouch import AirTouchStatus
from homeassistant.components.climate import SCAN_INTERVAL
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_HOST, Platform
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from .const import DOMAIN
_LOGGER = logging.getLogger(__name__)
from .coordinator import AirtouchDataUpdateCoordinator
PLATFORMS = [Platform.CLIMATE]
@ -44,38 +38,3 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
hass.data[DOMAIN].pop(entry.entry_id)
return unload_ok
class AirtouchDataUpdateCoordinator(DataUpdateCoordinator):
"""Class to manage fetching Airtouch data."""
def __init__(self, hass, airtouch):
"""Initialize global Airtouch data updater."""
self.airtouch = airtouch
super().__init__(
hass,
_LOGGER,
name=DOMAIN,
update_interval=SCAN_INTERVAL,
)
async def _async_update_data(self):
"""Fetch data from Airtouch."""
await self.airtouch.UpdateInfo()
if self.airtouch.Status != AirTouchStatus.OK:
raise UpdateFailed("Airtouch connection issue")
return {
"acs": [
{"ac_number": ac.AcNumber, "is_on": ac.IsOn}
for ac in self.airtouch.GetAcs()
],
"groups": [
{
"group_number": group.GroupNumber,
"group_name": group.GroupName,
"is_on": group.IsOn,
}
for group in self.airtouch.GetGroups()
],
}

View File

@ -0,0 +1,46 @@
"""DataUpdateCoordinator for the airtouch integration."""
import logging
from airtouch4pyapi.airtouch import AirTouchStatus
from homeassistant.components.climate import SCAN_INTERVAL
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from .const import DOMAIN
_LOGGER = logging.getLogger(__name__)
class AirtouchDataUpdateCoordinator(DataUpdateCoordinator):
"""Class to manage fetching Airtouch data."""
def __init__(self, hass, airtouch):
"""Initialize global Airtouch data updater."""
self.airtouch = airtouch
super().__init__(
hass,
_LOGGER,
name=DOMAIN,
update_interval=SCAN_INTERVAL,
)
async def _async_update_data(self):
"""Fetch data from Airtouch."""
await self.airtouch.UpdateInfo()
if self.airtouch.Status != AirTouchStatus.OK:
raise UpdateFailed("Airtouch connection issue")
return {
"acs": [
{"ac_number": ac.AcNumber, "is_on": ac.IsOn}
for ac in self.airtouch.GetAcs()
],
"groups": [
{
"group_number": group.GroupNumber,
"group_name": group.GroupName,
"is_on": group.IsOn,
}
for group in self.airtouch.GetGroups()
],
}

View File

@ -1,7 +1,7 @@
{
"domain": "airtouch4",
"name": "AirTouch 4",
"codeowners": [],
"codeowners": ["@samsinnamon"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/airtouch4",
"iot_class": "local_polling",

View File

@ -421,8 +421,10 @@ class AirVisualEntity(CoordinatorEntity):
self._entry = entry
self.entity_description = description
# pylint: disable-next=hass-missing-super-call
async def async_added_to_hass(self) -> None:
"""Register callbacks."""
await super().async_added_to_hass()
@callback
def update() -> None:

View File

@ -24,6 +24,7 @@ PLATFORMS: list[Platform] = [
Platform.CLIMATE,
Platform.SELECT,
Platform.SENSOR,
Platform.WATER_HEATER,
]
_LOGGER = logging.getLogger(__name__)

View File

@ -106,6 +106,22 @@ class AirzoneHotWaterEntity(AirzoneEntity):
"""Return DHW value by key."""
return self.coordinator.data[AZD_HOT_WATER].get(key)
async def _async_update_dhw_params(self, params: dict[str, Any]) -> None:
"""Send DHW parameters to API."""
_params = {
API_SYSTEM_ID: 0,
**params,
}
_LOGGER.debug("update_dhw_params=%s", _params)
try:
await self.coordinator.airzone.set_dhw_parameters(_params)
except AirzoneError as error:
raise HomeAssistantError(
f"Failed to set dhw {self.name}: {error}"
) from error
self.coordinator.async_set_updated_data(self.coordinator.airzone.data())
class AirzoneWebServerEntity(AirzoneEntity):
"""Define an Airzone WebServer entity."""

View File

@ -0,0 +1,131 @@
"""Support for the Airzone water heater."""
from __future__ import annotations
from typing import Any, Final
from aioairzone.common import HotWaterOperation
from aioairzone.const import (
API_ACS_ON,
API_ACS_POWER_MODE,
API_ACS_SET_POINT,
AZD_HOT_WATER,
AZD_NAME,
AZD_OPERATION,
AZD_OPERATIONS,
AZD_TEMP,
AZD_TEMP_MAX,
AZD_TEMP_MIN,
AZD_TEMP_SET,
AZD_TEMP_UNIT,
)
from homeassistant.components.water_heater import (
STATE_ECO,
STATE_PERFORMANCE,
WaterHeaterEntity,
WaterHeaterEntityFeature,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import ATTR_TEMPERATURE, STATE_OFF
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from .const import DOMAIN, TEMP_UNIT_LIB_TO_HASS
from .coordinator import AirzoneUpdateCoordinator
from .entity import AirzoneHotWaterEntity
OPERATION_LIB_TO_HASS: Final[dict[HotWaterOperation, str]] = {
HotWaterOperation.Off: STATE_OFF,
HotWaterOperation.On: STATE_ECO,
HotWaterOperation.Powerful: STATE_PERFORMANCE,
}
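# API parameters needed to select each Home Assistant water heater operation mode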
OPERATION_MODE_TO_DHW_PARAMS: Final[dict[str, dict[str, Any]]] = {
STATE_OFF: {
API_ACS_ON: 0,
},
STATE_ECO: {
API_ACS_ON: 1,
API_ACS_POWER_MODE: 0,
},
STATE_PERFORMANCE: {
API_ACS_ON: 1,
API_ACS_POWER_MODE: 1,
},
}
async def async_setup_entry(
hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback
) -> None:
"""Add Airzone sensors from a config_entry."""
coordinator = hass.data[DOMAIN][entry.entry_id]
if AZD_HOT_WATER in coordinator.data:
async_add_entities([AirzoneWaterHeater(coordinator, entry)])
class AirzoneWaterHeater(AirzoneHotWaterEntity, WaterHeaterEntity):
"""Define an Airzone Water Heater."""
_attr_supported_features = (
WaterHeaterEntityFeature.TARGET_TEMPERATURE
| WaterHeaterEntityFeature.ON_OFF
| WaterHeaterEntityFeature.OPERATION_MODE
)
def __init__(
self,
coordinator: AirzoneUpdateCoordinator,
entry: ConfigEntry,
) -> None:
"""Initialize Airzone water heater entity."""
super().__init__(coordinator, entry)
self._attr_name = self.get_airzone_value(AZD_NAME)
self._attr_unique_id = f"{self._attr_unique_id}_dhw"
self._attr_operation_list = [
OPERATION_LIB_TO_HASS[operation]
for operation in self.get_airzone_value(AZD_OPERATIONS)
]
self._attr_temperature_unit = TEMP_UNIT_LIB_TO_HASS[
self.get_airzone_value(AZD_TEMP_UNIT)
]
self._async_update_attrs()
async def async_turn_off(self, **kwargs: Any) -> None:
"""Turn the water heater off."""
await self._async_update_dhw_params({API_ACS_ON: 0})
async def async_turn_on(self, **kwargs: Any) -> None:
"""Turn the water heater off."""
await self._async_update_dhw_params({API_ACS_ON: 1})
async def async_set_operation_mode(self, operation_mode: str) -> None:
"""Set new target operation mode."""
params = OPERATION_MODE_TO_DHW_PARAMS.get(operation_mode, {})
await self._async_update_dhw_params(params)
async def async_set_temperature(self, **kwargs: Any) -> None:
"""Set new target temperature."""
params: dict[str, Any] = {}
if ATTR_TEMPERATURE in kwargs:
params[API_ACS_SET_POINT] = kwargs[ATTR_TEMPERATURE]
await self._async_update_dhw_params(params)
@callback
def _handle_coordinator_update(self) -> None:
"""Update attributes when the coordinator updates."""
self._async_update_attrs()
super()._handle_coordinator_update()
@callback
def _async_update_attrs(self) -> None:
"""Update water heater attributes."""
self._attr_current_temperature = self.get_airzone_value(AZD_TEMP)
self._attr_current_operation = OPERATION_LIB_TO_HASS[
self.get_airzone_value(AZD_OPERATION)
]
self._attr_max_temp = self.get_airzone_value(AZD_TEMP_MAX)
self._attr_min_temp = self.get_airzone_value(AZD_TEMP_MIN)
self._attr_target_temperature = self.get_airzone_value(AZD_TEMP_SET)

View File

@ -14,6 +14,7 @@ from .coordinator import AirzoneUpdateCoordinator
PLATFORMS: list[Platform] = [
Platform.BINARY_SENSOR,
Platform.CLIMATE,
Platform.SENSOR,
]

View File

@ -0,0 +1,208 @@
"""Support for the Airzone Cloud climate."""
from __future__ import annotations
from typing import Any, Final
from aioairzone_cloud.common import OperationAction, OperationMode, TemperatureUnit
from aioairzone_cloud.const import (
API_MODE,
API_OPTS,
API_POWER,
API_SETPOINT,
API_UNITS,
API_VALUE,
AZD_ACTION,
AZD_HUMIDITY,
AZD_MASTER,
AZD_MODE,
AZD_MODES,
AZD_POWER,
AZD_TEMP,
AZD_TEMP_SET,
AZD_TEMP_SET_MAX,
AZD_TEMP_SET_MIN,
AZD_TEMP_STEP,
AZD_ZONES,
)
from homeassistant.components.climate import (
ClimateEntity,
ClimateEntityFeature,
HVACAction,
HVACMode,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import ATTR_TEMPERATURE, UnitOfTemperature
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from .const import DOMAIN
from .coordinator import AirzoneUpdateCoordinator
from .entity import AirzoneEntity, AirzoneZoneEntity
HVAC_ACTION_LIB_TO_HASS: Final[dict[OperationAction, HVACAction]] = {
OperationAction.COOLING: HVACAction.COOLING,
OperationAction.DRYING: HVACAction.DRYING,
OperationAction.FAN: HVACAction.FAN,
OperationAction.HEATING: HVACAction.HEATING,
OperationAction.IDLE: HVACAction.IDLE,
OperationAction.OFF: HVACAction.OFF,
}
HVAC_MODE_LIB_TO_HASS: Final[dict[OperationMode, HVACMode]] = {
OperationMode.STOP: HVACMode.OFF,
OperationMode.COOLING: HVACMode.COOL,
OperationMode.COOLING_AIR: HVACMode.COOL,
OperationMode.COOLING_RADIANT: HVACMode.COOL,
OperationMode.COOLING_COMBINED: HVACMode.COOL,
OperationMode.HEATING: HVACMode.HEAT,
OperationMode.HEAT_AIR: HVACMode.HEAT,
OperationMode.HEAT_RADIANT: HVACMode.HEAT,
OperationMode.HEAT_COMBINED: HVACMode.HEAT,
OperationMode.EMERGENCY_HEAT: HVACMode.HEAT,
OperationMode.VENTILATION: HVACMode.FAN_ONLY,
OperationMode.DRY: HVACMode.DRY,
OperationMode.AUTO: HVACMode.HEAT_COOL,
}
HVAC_MODE_HASS_TO_LIB: Final[dict[HVACMode, OperationMode]] = {
HVACMode.OFF: OperationMode.STOP,
HVACMode.COOL: OperationMode.COOLING,
HVACMode.HEAT: OperationMode.HEATING,
HVACMode.FAN_ONLY: OperationMode.VENTILATION,
HVACMode.DRY: OperationMode.DRY,
HVACMode.HEAT_COOL: OperationMode.AUTO,
}
async def async_setup_entry(
hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback
) -> None:
"""Add Airzone climate from a config_entry."""
coordinator: AirzoneUpdateCoordinator = hass.data[DOMAIN][entry.entry_id]
entities: list[AirzoneClimate] = []
# Zones
for zone_id, zone_data in coordinator.data.get(AZD_ZONES, {}).items():
entities.append(
AirzoneZoneClimate(
coordinator,
zone_id,
zone_data,
)
)
async_add_entities(entities)
class AirzoneClimate(AirzoneEntity, ClimateEntity):
"""Define an Airzone Cloud climate."""
_attr_supported_features = ClimateEntityFeature.TARGET_TEMPERATURE
_attr_temperature_unit = UnitOfTemperature.CELSIUS
async def async_turn_on(self) -> None:
"""Turn the entity on."""
params = {
API_POWER: {
API_VALUE: True,
},
}
await self._async_update_params(params)
async def async_turn_off(self) -> None:
"""Turn the entity off."""
params = {
API_POWER: {
API_VALUE: False,
},
}
await self._async_update_params(params)
async def async_set_temperature(self, **kwargs: Any) -> None:
"""Set new target temperature."""
params: dict[str, Any] = {}
if ATTR_TEMPERATURE in kwargs:
params[API_SETPOINT] = {
API_VALUE: kwargs[ATTR_TEMPERATURE],
API_OPTS: {
API_UNITS: TemperatureUnit.CELSIUS.value,
},
}
await self._async_update_params(params)
@callback
def _handle_coordinator_update(self) -> None:
"""Update attributes when the coordinator updates."""
self._async_update_attrs()
super()._handle_coordinator_update()
@callback
def _async_update_attrs(self) -> None:
"""Update climate attributes."""
self._attr_current_temperature = self.get_airzone_value(AZD_TEMP)
self._attr_current_humidity = self.get_airzone_value(AZD_HUMIDITY)
self._attr_hvac_action = HVAC_ACTION_LIB_TO_HASS[
self.get_airzone_value(AZD_ACTION)
]
if self.get_airzone_value(AZD_POWER):
self._attr_hvac_mode = HVAC_MODE_LIB_TO_HASS[
self.get_airzone_value(AZD_MODE)
]
else:
self._attr_hvac_mode = HVACMode.OFF
self._attr_max_temp = self.get_airzone_value(AZD_TEMP_SET_MAX)
self._attr_min_temp = self.get_airzone_value(AZD_TEMP_SET_MIN)
self._attr_target_temperature = self.get_airzone_value(AZD_TEMP_SET)
class AirzoneZoneClimate(AirzoneZoneEntity, AirzoneClimate):
"""Define an Airzone Cloud Zone climate."""
_attr_has_entity_name = True
def __init__(
self,
coordinator: AirzoneUpdateCoordinator,
system_zone_id: str,
zone_data: dict,
) -> None:
"""Initialize Airzone Cloud Zone climate."""
super().__init__(coordinator, system_zone_id, zone_data)
self._attr_unique_id = system_zone_id
self._attr_target_temperature_step = self.get_airzone_value(AZD_TEMP_STEP)
self._attr_hvac_modes = [
HVAC_MODE_LIB_TO_HASS[mode] for mode in self.get_airzone_value(AZD_MODES)
]
if HVACMode.OFF not in self._attr_hvac_modes:
self._attr_hvac_modes += [HVACMode.OFF]
self._async_update_attrs()
async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None:
"""Set hvac mode."""
slave_raise = False
params: dict[str, Any] = {}
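# Only the master zone can change the HVAC mode; slave zones are still powered on
# and the error is raised afterwards so the power command is not lost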
if hvac_mode == HVACMode.OFF:
params[API_POWER] = {
API_VALUE: False,
}
else:
mode = HVAC_MODE_HASS_TO_LIB[hvac_mode]
if mode != self.get_airzone_value(AZD_MODE):
if self.get_airzone_value(AZD_MASTER):
params[API_MODE] = {
API_VALUE: mode.value,
}
else:
slave_raise = True
params[API_POWER] = {
API_VALUE: True,
}
await self._async_update_params(params)
if slave_raise:
raise HomeAssistantError(f"Mode can't be changed on slave zone {self.name}")

View File

@ -2,6 +2,7 @@
from __future__ import annotations
from abc import ABC, abstractmethod
import logging
from typing import Any
from aioairzone_cloud.const import (
@ -15,7 +16,9 @@ from aioairzone_cloud.const import (
AZD_WEBSERVERS,
AZD_ZONES,
)
from aioairzone_cloud.exceptions import AirzoneCloudError
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import device_registry as dr
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.update_coordinator import CoordinatorEntity
@ -23,6 +26,8 @@ from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import DOMAIN, MANUFACTURER
from .coordinator import AirzoneUpdateCoordinator
_LOGGER = logging.getLogger(__name__)
class AirzoneEntity(CoordinatorEntity[AirzoneUpdateCoordinator], ABC):
"""Define an Airzone Cloud entity."""
@ -36,6 +41,10 @@ class AirzoneEntity(CoordinatorEntity[AirzoneUpdateCoordinator], ABC):
def get_airzone_value(self, key: str) -> Any:
"""Return Airzone Cloud entity value by key."""
async def _async_update_params(self, params: dict[str, Any]) -> None:
"""Send Airzone parameters to Cloud API."""
raise NotImplementedError
class AirzoneAidooEntity(AirzoneEntity):
"""Define an Airzone Cloud Aidoo entity."""
@ -153,3 +162,15 @@ class AirzoneZoneEntity(AirzoneEntity):
if zone := self.coordinator.data[AZD_ZONES].get(self.zone_id):
value = zone.get(key)
return value
async def _async_update_params(self, params: dict[str, Any]) -> None:
"""Send Zone parameters to Cloud API."""
_LOGGER.debug("zone=%s: update_params=%s", self.name, params)
try:
await self.coordinator.airzone.api_set_zone_id_params(self.zone_id, params)
except AirzoneCloudError as error:
raise HomeAssistantError(
f"Failed to set {self.name} params: {error}"
) from error
self.coordinator.async_set_updated_data(self.coordinator.airzone.data())

View File

@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/airzone_cloud",
"iot_class": "cloud_polling",
"loggers": ["aioairzone_cloud"],
"requirements": ["aioairzone-cloud==0.2.1"]
"requirements": ["aioairzone-cloud==0.2.3"]
}

View File

@ -10,7 +10,7 @@ from homeassistant.components.cover import CoverDeviceClass, CoverEntity
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import STATE_CLOSED, STATE_CLOSING, STATE_OPENING
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import PlatformNotReady
from homeassistant.exceptions import HomeAssistantError, PlatformNotReady
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity_platform import AddEntitiesCallback
@ -75,11 +75,13 @@ class AladdinDevice(CoverEntity):
async def async_close_cover(self, **kwargs: Any) -> None:
"""Issue close command to cover."""
await self._acc.close_door(self._device_id, self._number)
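# Surface a failure to the user when the API reports the close command did not succeed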
if not await self._acc.close_door(self._device_id, self._number):
raise HomeAssistantError("Aladdin Connect API failed to close the cover")
async def async_open_cover(self, **kwargs: Any) -> None:
"""Issue open command to cover."""
await self._acc.open_door(self._device_id, self._number)
if not await self._acc.open_door(self._device_id, self._number):
raise HomeAssistantError("Aladdin Connect API failed to open the cover")
async def async_update(self) -> None:
"""Update status of cover."""

View File

@ -0,0 +1,29 @@
"""Diagnostics support for Aladdin Connect."""
from __future__ import annotations
from typing import Any
from AIOAladdinConnect import AladdinConnectClient
from homeassistant.components.diagnostics import async_redact_data
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from .const import DOMAIN
TO_REDACT = {"serial", "device_id"}
async def async_get_config_entry_diagnostics(
hass: HomeAssistant,
config_entry: ConfigEntry,
) -> dict[str, Any]:
"""Return diagnostics for a config entry."""
acc: AladdinConnectClient = hass.data[DOMAIN][config_entry.entry_id]
diagnostics_data = {
"doors": async_redact_data(acc.doors, TO_REDACT),
}
return diagnostics_data

View File

@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/aladdin_connect",
"iot_class": "cloud_polling",
"loggers": ["aladdin_connect"],
"requirements": ["AIOAladdinConnect==0.1.57"]
"requirements": ["AIOAladdinConnect==0.1.58"]
}

View File

@ -707,7 +707,8 @@ class MediaPlayerCapabilities(AlexaEntity):
# AlexaEqualizerController is disabled for denonavr
# since it blocks alexa from discovering any devices.
domain = entity_sources(self.hass).get(self.entity_id, {}).get("domain")
entity_info = entity_sources(self.hass).get(self.entity_id)
domain = entity_info["domain"] if entity_info else None
if (
supported & media_player.MediaPlayerEntityFeature.SELECT_SOUND_MODE
and domain != "denonavr"

View File

@ -378,8 +378,9 @@ async def async_send_changereport_message(
response_text = await response.text()
_LOGGER.debug("Sent: %s", json.dumps(message_serialized))
_LOGGER.debug("Received (%s): %s", response.status, response_text)
if _LOGGER.isEnabledFor(logging.DEBUG):
_LOGGER.debug("Sent: %s", json.dumps(message_serialized))
_LOGGER.debug("Received (%s): %s", response.status, response_text)
if response.status == HTTPStatus.ACCEPTED:
return
@ -531,8 +532,9 @@ async def async_send_doorbell_event_message(
response_text = await response.text()
_LOGGER.debug("Sent: %s", json.dumps(message_serialized))
_LOGGER.debug("Received (%s): %s", response.status, response_text)
if _LOGGER.isEnabledFor(logging.DEBUG):
_LOGGER.debug("Sent: %s", json.dumps(message_serialized))
_LOGGER.debug("Received (%s): %s", response.status, response_text)
if response.status == HTTPStatus.ACCEPTED:
return

View File

@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/amcrest",
"iot_class": "local_polling",
"loggers": ["amcrest"],
"requirements": ["amcrest==1.9.7"]
"requirements": ["amcrest==1.9.8"]
}

View File

@ -8,8 +8,8 @@
"iot_class": "local_polling",
"loggers": ["adb_shell", "androidtv", "pure_python_adb"],
"requirements": [
"adb-shell[async]==0.4.3",
"androidtv[async]==0.0.70",
"adb-shell[async]==0.4.4",
"androidtv[async]==0.0.72",
"pure-python-adb[async]==0.3.0.dev0"
]
}

View File

@ -9,8 +9,8 @@ from anthemav.connection import Connection
from anthemav.device_error import DeviceError
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.const import CONF_HOST, CONF_MAC, CONF_NAME, CONF_PORT
from homeassistant.config_entries import ConfigFlow
from homeassistant.const import CONF_HOST, CONF_MAC, CONF_PORT
from homeassistant.data_entry_flow import FlowResult
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.device_registry import format_mac
@ -43,7 +43,7 @@ async def connect_device(user_input: dict[str, Any]) -> Connection:
return avr
class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
class AnthemAVConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle a config flow for Anthem A/V Receivers."""
VERSION = 1
@ -57,9 +57,6 @@ class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
step_id="user", data_schema=STEP_USER_DATA_SCHEMA
)
if CONF_NAME not in user_input:
user_input[CONF_NAME] = DEFAULT_NAME
errors = {}
avr: Connection | None = None
@ -84,7 +81,7 @@ class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
user_input[CONF_MODEL] = avr.protocol.model
await self.async_set_unique_id(user_input[CONF_MAC])
self._abort_if_unique_id_configured()
return self.async_create_entry(title=user_input[CONF_NAME], data=user_input)
return self.async_create_entry(title=DEFAULT_NAME, data=user_input)
finally:
if avr is not None:
avr.close()

View File

@ -13,7 +13,7 @@ from homeassistant.components.media_player import (
MediaPlayerState,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_MAC, CONF_NAME
from homeassistant.const import CONF_MAC
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.dispatcher import async_dispatcher_connect
@ -30,7 +30,7 @@ async def async_setup_entry(
async_add_entities: AddEntitiesCallback,
) -> None:
"""Set up entry."""
name = config_entry.data[CONF_NAME]
name = config_entry.title
mac_address = config_entry.data[CONF_MAC]
model = config_entry.data[CONF_MODEL]

View File

@ -48,7 +48,8 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Unload a config entry."""
unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
hass.data[DOMAIN].pop(entry.entry_id)
if unload_ok and DOMAIN in hass.data:
hass.data[DOMAIN].pop(entry.entry_id)
return unload_ok

View File

@ -9,10 +9,12 @@ from aiohttp import web
from aiohttp.web_exceptions import HTTPBadRequest
import voluptuous as vol
from homeassistant.auth.models import User
from homeassistant.auth.permissions.const import POLICY_READ
from homeassistant.bootstrap import DATA_LOGGING
from homeassistant.components.http import HomeAssistantView, require_admin
from homeassistant.const import (
CONTENT_TYPE_JSON,
EVENT_HOMEASSISTANT_STOP,
MATCH_ALL,
URL_API,
@ -195,16 +197,24 @@ class APIStatesView(HomeAssistantView):
name = "api:states"
@ha.callback
def get(self, request):
def get(self, request: web.Request) -> web.Response:
"""Get current states."""
user = request["hass_user"]
entity_perm = user.permissions.check_entity
states = [
state
for state in request.app["hass"].states.async_all()
if entity_perm(state.entity_id, "read")
]
return self.json(states)
user: User = request["hass_user"]
hass: HomeAssistant = request.app["hass"]
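# Build the response body from each state's pre-serialized JSON instead of
# serializing every state dict again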
if user.is_admin:
states = (state.as_dict_json for state in hass.states.async_all())
else:
entity_perm = user.permissions.check_entity
states = (
state.as_dict_json
for state in hass.states.async_all()
if entity_perm(state.entity_id, "read")
)
response = web.Response(
body=f'[{",".join(states)}]', content_type=CONTENT_TYPE_JSON
)
response.enable_compression()
return response
class APIEntityStateView(HomeAssistantView):
@ -214,14 +224,18 @@ class APIEntityStateView(HomeAssistantView):
name = "api:entity-state"
@ha.callback
def get(self, request, entity_id):
def get(self, request: web.Request, entity_id: str) -> web.Response:
"""Retrieve state of entity."""
user = request["hass_user"]
user: User = request["hass_user"]
hass: HomeAssistant = request.app["hass"]
if not user.permissions.check_entity(entity_id, POLICY_READ):
raise Unauthorized(entity_id=entity_id)
if state := request.app["hass"].states.get(entity_id):
return self.json(state)
if state := hass.states.get(entity_id):
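# Reuse the state's cached JSON representation instead of serializing it again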
return web.Response(
body=state.as_dict_json,
content_type=CONTENT_TYPE_JSON,
)
return self.json_message("Entity not found.", HTTPStatus.NOT_FOUND)
async def post(self, request, entity_id):
@ -256,7 +270,7 @@ class APIEntityStateView(HomeAssistantView):
# Read the state back for our response
status_code = HTTPStatus.CREATED if is_new_state else HTTPStatus.OK
resp = self.json(hass.states.get(entity_id), status_code)
resp = self.json(hass.states.get(entity_id).as_dict(), status_code)
resp.headers.add("Location", f"/api/states/{entity_id}")

View File

@ -26,7 +26,6 @@ from homeassistant.helpers.schema_config_entry_flow import (
SchemaFlowFormStep,
SchemaOptionsFlowHandler,
)
from homeassistant.util.network import is_ipv6_address
from .const import CONF_CREDENTIALS, CONF_IDENTIFIERS, CONF_START_OFF, DOMAIN
@ -184,9 +183,9 @@ class AppleTVConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
self, discovery_info: zeroconf.ZeroconfServiceInfo
) -> FlowResult:
"""Handle device found via zeroconf."""
host = discovery_info.host
if is_ipv6_address(host):
if discovery_info.ip_address.version == 6:
return self.async_abort(reason="ipv6_not_supported")
host = discovery_info.host
self._async_abort_entries_match({CONF_ADDRESS: host})
service_type = discovery_info.type[:-1] # Remove leading .
name = discovery_info.name.replace(f".{service_type}.", "")

View File

@ -5,5 +5,5 @@
"documentation": "https://www.home-assistant.io/integrations/apprise",
"iot_class": "cloud_push",
"loggers": ["apprise"],
"requirements": ["apprise==1.4.5"]
"requirements": ["apprise==1.5.0"]
}

View File

@ -5,5 +5,5 @@
"documentation": "https://www.home-assistant.io/integrations/aquostv",
"iot_class": "local_polling",
"loggers": ["sharp_aquos_rc"],
"requirements": ["sharp-aquos-rc==0.3.2"]
"requirements": ["sharp_aquos_rc==0.3.2"]
}

View File

@ -12,6 +12,7 @@ from homeassistant.helpers.typing import ConfigType
from .const import DATA_CONFIG, DOMAIN
from .error import PipelineNotFound
from .pipeline import (
AudioSettings,
Pipeline,
PipelineEvent,
PipelineEventCallback,
@ -33,6 +34,7 @@ __all__ = (
"async_get_pipelines",
"async_setup",
"async_pipeline_from_audio_stream",
"AudioSettings",
"Pipeline",
"PipelineEvent",
"PipelineEventType",
@ -71,6 +73,7 @@ async def async_pipeline_from_audio_stream(
conversation_id: str | None = None,
tts_audio_output: str | None = None,
wake_word_settings: WakeWordSettings | None = None,
audio_settings: AudioSettings | None = None,
device_id: str | None = None,
start_stage: PipelineStage = PipelineStage.STT,
end_stage: PipelineStage = PipelineStage.TTS,
@ -93,6 +96,7 @@ async def async_pipeline_from_audio_stream(
event_callback=event_callback,
tts_audio_output=tts_audio_output,
wake_word_settings=wake_word_settings,
audio_settings=audio_settings or AudioSettings(),
),
)
await pipeline_input.validate()

View File

@ -22,6 +22,14 @@ class WakeWordDetectionError(PipelineError):
"""Error in wake-word-detection portion of pipeline."""
class WakeWordDetectionAborted(WakeWordDetectionError):
"""Wake-word-detection was aborted."""
def __init__(self) -> None:
"""Set error message."""
super().__init__("wake_word_detection_aborted", "")
class WakeWordTimeoutError(WakeWordDetectionError):
"""Timeout when wake word was not detected."""

View File

@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/assist_pipeline",
"iot_class": "local_push",
"quality_scale": "internal",
"requirements": ["webrtcvad==2.0.10"]
"requirements": ["webrtc-noise-gain==1.2.3"]
}

View File

@ -1,7 +1,9 @@
"""Classes for voice assistant pipelines."""
from __future__ import annotations
import array
import asyncio
from collections import defaultdict, deque
from collections.abc import AsyncGenerator, AsyncIterable, Callable, Iterable
from dataclasses import asdict, dataclass, field
from enum import StrEnum
@ -10,10 +12,11 @@ from pathlib import Path
from queue import Queue
from threading import Thread
import time
from typing import Any, cast
from typing import Any, Final, cast
import wave
import voluptuous as vol
from webrtc_noise_gain import AudioProcessor
from homeassistant.components import (
conversation,
@ -29,6 +32,7 @@ from homeassistant.components.tts.media_source import (
from homeassistant.core import Context, HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.collection import (
CHANGE_UPDATED,
CollectionError,
ItemNotFound,
SerializedStorageCollection,
@ -51,16 +55,17 @@ from .error import (
PipelineNotFound,
SpeechToTextError,
TextToSpeechError,
WakeWordDetectionAborted,
WakeWordDetectionError,
WakeWordTimeoutError,
)
from .ring_buffer import RingBuffer
from .vad import VoiceActivityTimeout, VoiceCommandSegmenter
from .vad import AudioBuffer, VoiceActivityTimeout, VoiceCommandSegmenter, chunk_samples
_LOGGER = logging.getLogger(__name__)
STORAGE_KEY = f"{DOMAIN}.pipelines"
STORAGE_VERSION = 1
STORAGE_VERSION_MINOR = 2
ENGINE_LANGUAGE_PAIRS = (
("stt_engine", "stt_language"),
@ -86,12 +91,17 @@ PIPELINE_FIELDS = {
vol.Required("tts_engine"): vol.Any(str, None),
vol.Required("tts_language"): vol.Any(str, None),
vol.Required("tts_voice"): vol.Any(str, None),
vol.Required("wake_word_entity"): vol.Any(str, None),
vol.Required("wake_word_id"): vol.Any(str, None),
}
STORED_PIPELINE_RUNS = 10
SAVE_DELAY = 10
AUDIO_PROCESSOR_SAMPLES: Final = 160  # 10 ms @ 16 kHz
AUDIO_PROCESSOR_BYTES: Final = AUDIO_PROCESSOR_SAMPLES * 2 # 16-bit samples
async def _async_resolve_default_pipeline_settings(
hass: HomeAssistant,
@ -111,6 +121,8 @@ async def _async_resolve_default_pipeline_settings(
tts_engine = None
tts_language = None
tts_voice = None
wake_word_entity = None
wake_word_id = None
# Find a matching language supported by the Home Assistant conversation agent
conversation_languages = language_util.matches(
@ -188,6 +200,8 @@ async def _async_resolve_default_pipeline_settings(
"tts_engine": tts_engine_id,
"tts_language": tts_language,
"tts_voice": tts_voice,
"wake_word_entity": wake_word_entity,
"wake_word_id": wake_word_id,
}
@ -295,6 +309,8 @@ class Pipeline:
tts_engine: str | None
tts_language: str | None
tts_voice: str | None
wake_word_entity: str | None
wake_word_id: str | None
id: str = field(default_factory=ulid_util.ulid)
@ -316,6 +332,8 @@ class Pipeline:
tts_engine=data["tts_engine"],
tts_language=data["tts_language"],
tts_voice=data["tts_voice"],
wake_word_entity=data["wake_word_entity"],
wake_word_id=data["wake_word_id"],
)
def to_json(self) -> dict[str, Any]:
@ -331,6 +349,8 @@ class Pipeline:
"tts_engine": self.tts_engine,
"tts_language": self.tts_language,
"tts_voice": self.tts_voice,
"wake_word_entity": self.wake_word_entity,
"wake_word_id": self.wake_word_id,
}
@ -380,6 +400,60 @@ class WakeWordSettings:
"""Seconds of audio to buffer before detection and forward to STT."""
@dataclass(frozen=True)
class AudioSettings:
"""Settings for pipeline audio processing."""
noise_suppression_level: int = 0
"""Level of noise suppression (0 = disabled, 4 = max)"""
auto_gain_dbfs: int = 0
"""Amount of automatic gain in dbFS (0 = disabled, 31 = max)"""
volume_multiplier: float = 1.0
"""Multiplier used directly on PCM samples (1.0 = no change, 2.0 = twice as loud)"""
is_vad_enabled: bool = True
"""True if VAD is used to determine the end of the voice command."""
is_chunking_enabled: bool = True
"""True if audio is automatically split into 10 ms chunks (required for VAD, etc.)"""
def __post_init__(self) -> None:
"""Verify settings post-initialization."""
if (self.noise_suppression_level < 0) or (self.noise_suppression_level > 4):
raise ValueError("noise_suppression_level must be in [0, 4]")
if (self.auto_gain_dbfs < 0) or (self.auto_gain_dbfs > 31):
raise ValueError("auto_gain_dbfs must be in [0, 31]")
if self.needs_processor and (not self.is_chunking_enabled):
raise ValueError("Chunking must be enabled for audio processing")
@property
def needs_processor(self) -> bool:
"""True if an audio processor is needed."""
return (
self.is_vad_enabled
or (self.noise_suppression_level > 0)
or (self.auto_gain_dbfs > 0)
)
@dataclass(frozen=True, slots=True)
class ProcessedAudioChunk:
"""Processed audio chunk and metadata."""
audio: bytes
"""Raw PCM audio @ 16Khz with 16-bit mono samples"""
timestamp_ms: int
"""Timestamp relative to start of audio stream (milliseconds)"""
is_speech: bool | None
"""True if audio chunk likely contains speech, False if not, None if unknown"""
@dataclass
class PipelineRun:
"""Running context for a pipeline."""
@ -395,13 +469,16 @@ class PipelineRun:
intent_agent: str | None = None
tts_audio_output: str | None = None
wake_word_settings: WakeWordSettings | None = None
audio_settings: AudioSettings = field(default_factory=AudioSettings)
id: str = field(default_factory=ulid_util.ulid)
stt_provider: stt.SpeechToTextEntity | stt.Provider = field(init=False)
tts_engine: str = field(init=False)
stt_provider: stt.SpeechToTextEntity | stt.Provider = field(init=False, repr=False)
tts_engine: str = field(init=False, repr=False)
tts_options: dict | None = field(init=False, default=None)
wake_word_engine: str = field(init=False)
wake_word_provider: wake_word.WakeWordDetectionEntity = field(init=False)
wake_word_entity_id: str | None = field(init=False, default=None, repr=False)
wake_word_entity: wake_word.WakeWordDetectionEntity = field(init=False, repr=False)
abort_wake_word_detection: bool = field(init=False, default=False)
debug_recording_thread: Thread | None = None
"""Thread that records audio to debug_recording_dir"""
@ -409,6 +486,12 @@ class PipelineRun:
debug_recording_queue: Queue[str | bytes | None] | None = None
"""Queue to communicate with debug recording thread"""
audio_processor: AudioProcessor | None = None
"""VAD/noise suppression/auto gain"""
audio_processor_buffer: AudioBuffer = field(init=False, repr=False)
"""Buffer used when splitting audio into chunks for audio processing"""
def __post_init__(self) -> None:
"""Set language for pipeline."""
self.language = self.pipeline.language or self.hass.config.language
@ -420,21 +503,37 @@ class PipelineRun:
raise InvalidPipelineStagesError(self.start_stage, self.end_stage)
pipeline_data: PipelineData = self.hass.data[DOMAIN]
if self.pipeline.id not in pipeline_data.pipeline_runs:
pipeline_data.pipeline_runs[self.pipeline.id] = LimitedSizeDict(
if self.pipeline.id not in pipeline_data.pipeline_debug:
pipeline_data.pipeline_debug[self.pipeline.id] = LimitedSizeDict(
size_limit=STORED_PIPELINE_RUNS
)
pipeline_data.pipeline_runs[self.pipeline.id][self.id] = PipelineRunDebug()
pipeline_data.pipeline_debug[self.pipeline.id][self.id] = PipelineRunDebug()
pipeline_data.pipeline_runs.add_run(self)
# Initialize with audio settings
self.audio_processor_buffer = AudioBuffer(AUDIO_PROCESSOR_BYTES)
if self.audio_settings.needs_processor:
self.audio_processor = AudioProcessor(
self.audio_settings.auto_gain_dbfs,
self.audio_settings.noise_suppression_level,
)
def __eq__(self, other: Any) -> bool:
"""Compare pipeline runs by id."""
if isinstance(other, PipelineRun):
return self.id == other.id
return False
@callback
def process_event(self, event: PipelineEvent) -> None:
"""Log an event and call listener."""
self.event_callback(event)
pipeline_data: PipelineData = self.hass.data[DOMAIN]
if self.id not in pipeline_data.pipeline_runs[self.pipeline.id]:
if self.id not in pipeline_data.pipeline_debug[self.pipeline.id]:
# This run has been evicted from the logged pipeline runs already
return
pipeline_data.pipeline_runs[self.pipeline.id][self.id].events.append(event)
pipeline_data.pipeline_debug[self.pipeline.id][self.id].events.append(event)
def start(self, device_id: str | None) -> None:
"""Emit run start event."""
@ -461,31 +560,36 @@ class PipelineRun:
)
)
pipeline_data: PipelineData = self.hass.data[DOMAIN]
pipeline_data.pipeline_runs.remove_run(self)
async def prepare_wake_word_detection(self) -> None:
"""Prepare wake-word-detection."""
engine = wake_word.async_default_engine(self.hass)
if engine is None:
entity_id = self.pipeline.wake_word_entity or wake_word.async_default_entity(
self.hass
)
if entity_id is None:
raise WakeWordDetectionError(
code="wake-engine-missing",
message="No wake word engine",
)
wake_word_provider = wake_word.async_get_wake_word_detection_entity(
self.hass, engine
wake_word_entity = wake_word.async_get_wake_word_detection_entity(
self.hass, entity_id
)
if wake_word_provider is None:
if wake_word_entity is None:
raise WakeWordDetectionError(
code="wake-provider-missing",
message=f"No wake-word-detection provider for: {engine}",
message=f"No wake-word-detection provider for: {entity_id}",
)
self.wake_word_engine = engine
self.wake_word_provider = wake_word_provider
self.wake_word_entity_id = entity_id
self.wake_word_entity = wake_word_entity
async def wake_word_detection(
self,
stream: AsyncIterable[bytes],
audio_chunks_for_stt: list[bytes],
stream: AsyncIterable[ProcessedAudioChunk],
audio_chunks_for_stt: list[ProcessedAudioChunk],
) -> wake_word.DetectionResult | None:
"""Run wake-word-detection portion of pipeline. Returns detection result."""
metadata_dict = asdict(
@ -506,14 +610,14 @@ class PipelineRun:
PipelineEvent(
PipelineEventType.WAKE_WORD_START,
{
"engine": self.wake_word_engine,
"entity_id": self.wake_word_entity_id,
"metadata": metadata_dict,
},
)
)
if self.debug_recording_queue is not None:
self.debug_recording_queue.put_nowait(f"00_wake-{self.wake_word_engine}")
self.debug_recording_queue.put_nowait(f"00_wake-{self.wake_word_entity_id}")
wake_word_settings = self.wake_word_settings or WakeWordSettings()
@ -526,27 +630,31 @@ class PipelineRun:
# Audio chunk buffer. This audio will be forwarded to speech-to-text
# after wake-word-detection.
num_audio_bytes_to_buffer = int(
wake_word_settings.audio_seconds_to_buffer * 16000 * 2 # 16-bit @ 16Khz
num_audio_chunks_to_buffer = int(
(wake_word_settings.audio_seconds_to_buffer * 16000)
/ AUDIO_PROCESSOR_SAMPLES
)
stt_audio_buffer: RingBuffer | None = None
if num_audio_bytes_to_buffer > 0:
stt_audio_buffer = RingBuffer(num_audio_bytes_to_buffer)
stt_audio_buffer: deque[ProcessedAudioChunk] | None = None
if num_audio_chunks_to_buffer > 0:
stt_audio_buffer = deque(maxlen=num_audio_chunks_to_buffer)
try:
# Detect wake word(s)
result = await self.wake_word_provider.async_process_audio_stream(
result = await self.wake_word_entity.async_process_audio_stream(
self._wake_word_audio_stream(
audio_stream=stream,
stt_audio_buffer=stt_audio_buffer,
wake_word_vad=wake_word_vad,
)
),
self.pipeline.wake_word_id,
)
if stt_audio_buffer is not None:
# All audio kept from right before the wake word was detected as
# a single chunk.
audio_chunks_for_stt.append(stt_audio_buffer.getvalue())
audio_chunks_for_stt.extend(stt_audio_buffer)
except WakeWordDetectionAborted:
raise
except WakeWordTimeoutError:
_LOGGER.debug("Timeout during wake word detection")
raise
@ -570,7 +678,11 @@ class PipelineRun:
# speech-to-text so the user does not have to pause before
# speaking the voice command.
for chunk_ts in result.queued_audio:
audio_chunks_for_stt.append(chunk_ts[0])
audio_chunks_for_stt.append(
ProcessedAudioChunk(
audio=chunk_ts[0], timestamp_ms=chunk_ts[1], is_speech=False
)
)
wake_word_output = asdict(result)
@ -588,8 +700,8 @@ class PipelineRun:
async def _wake_word_audio_stream(
self,
audio_stream: AsyncIterable[bytes],
stt_audio_buffer: RingBuffer | None,
audio_stream: AsyncIterable[ProcessedAudioChunk],
stt_audio_buffer: deque[ProcessedAudioChunk] | None,
wake_word_vad: VoiceActivityTimeout | None,
sample_rate: int = 16000,
sample_width: int = 2,
@ -599,25 +711,27 @@ class PipelineRun:
Adds audio to a buffer that will be forwarded to speech-to-text after
detection. Times out if VAD detects enough silence.
"""
ms_per_sample = sample_rate // 1000
timestamp_ms = 0
chunk_seconds = AUDIO_PROCESSOR_SAMPLES / sample_rate
async for chunk in audio_stream:
if self.debug_recording_queue is not None:
self.debug_recording_queue.put_nowait(chunk)
if self.abort_wake_word_detection:
raise WakeWordDetectionAborted
yield chunk, timestamp_ms
timestamp_ms += (len(chunk) // sample_width) // ms_per_sample
if self.debug_recording_queue is not None:
self.debug_recording_queue.put_nowait(chunk.audio)
yield chunk.audio, chunk.timestamp_ms
# Wake-word-detection occurs *after* the wake word was actually
# spoken. Keeping audio right before detection allows the voice
# command to be spoken immediately after the wake word.
if stt_audio_buffer is not None:
stt_audio_buffer.put(chunk)
stt_audio_buffer.append(chunk)
if (wake_word_vad is not None) and (not wake_word_vad.process(chunk)):
raise WakeWordTimeoutError(
code="wake-word-timeout", message="Wake word was not detected"
)
if wake_word_vad is not None:
if not wake_word_vad.process(chunk_seconds, chunk.is_speech):
raise WakeWordTimeoutError(
code="wake-word-timeout", message="Wake word was not detected"
)
async def prepare_speech_to_text(self, metadata: stt.SpeechMetadata) -> None:
"""Prepare speech-to-text."""
@ -650,7 +764,7 @@ class PipelineRun:
async def speech_to_text(
self,
metadata: stt.SpeechMetadata,
stream: AsyncIterable[bytes],
stream: AsyncIterable[ProcessedAudioChunk],
) -> str:
"""Run speech-to-text portion of pipeline. Returns the spoken text."""
if isinstance(self.stt_provider, stt.Provider):
@ -674,11 +788,13 @@ class PipelineRun:
try:
# Transcribe audio stream
stt_vad: VoiceCommandSegmenter | None = None
if self.audio_settings.is_vad_enabled:
stt_vad = VoiceCommandSegmenter()
result = await self.stt_provider.async_process_audio_stream(
metadata,
self._speech_to_text_stream(
audio_stream=stream, stt_vad=VoiceCommandSegmenter()
),
self._speech_to_text_stream(audio_stream=stream, stt_vad=stt_vad),
)
except Exception as src_error:
_LOGGER.exception("Unexpected error during speech-to-text")
@ -715,26 +831,25 @@ class PipelineRun:
async def _speech_to_text_stream(
self,
audio_stream: AsyncIterable[bytes],
audio_stream: AsyncIterable[ProcessedAudioChunk],
stt_vad: VoiceCommandSegmenter | None,
sample_rate: int = 16000,
sample_width: int = 2,
) -> AsyncGenerator[bytes, None]:
"""Yield audio chunks until VAD detects silence or speech-to-text completes."""
ms_per_sample = sample_rate // 1000
chunk_seconds = AUDIO_PROCESSOR_SAMPLES / sample_rate
sent_vad_start = False
timestamp_ms = 0
async for chunk in audio_stream:
if self.debug_recording_queue is not None:
self.debug_recording_queue.put_nowait(chunk)
self.debug_recording_queue.put_nowait(chunk.audio)
if stt_vad is not None:
if not stt_vad.process(chunk):
if not stt_vad.process(chunk_seconds, chunk.is_speech):
# Silence detected at the end of voice command
self.process_event(
PipelineEvent(
PipelineEventType.STT_VAD_END,
{"timestamp": timestamp_ms},
{"timestamp": chunk.timestamp_ms},
)
)
break
@ -744,13 +859,12 @@ class PipelineRun:
self.process_event(
PipelineEvent(
PipelineEventType.STT_VAD_START,
{"timestamp": timestamp_ms},
{"timestamp": chunk.timestamp_ms},
)
)
sent_vad_start = True
yield chunk
timestamp_ms += (len(chunk) // sample_width) // ms_per_sample
yield chunk.audio
async def prepare_recognize_intent(self) -> None:
"""Prepare recognizing an intent."""
@ -961,6 +1075,87 @@ class PipelineRun:
self.debug_recording_queue = None
self.debug_recording_thread = None
async def process_volume_only(
self,
audio_stream: AsyncIterable[bytes],
sample_rate: int = 16000,
sample_width: int = 2,
) -> AsyncGenerator[ProcessedAudioChunk, None]:
"""Apply volume transformation only (no VAD/audio enhancements) with optional chunking."""
ms_per_sample = sample_rate // 1000
ms_per_chunk = (AUDIO_PROCESSOR_SAMPLES // sample_width) // ms_per_sample
timestamp_ms = 0
async for chunk in audio_stream:
if self.audio_settings.volume_multiplier != 1.0:
chunk = _multiply_volume(chunk, self.audio_settings.volume_multiplier)
if self.audio_settings.is_chunking_enabled:
# 10 ms chunking
for chunk_10ms in chunk_samples(
chunk, AUDIO_PROCESSOR_BYTES, self.audio_processor_buffer
):
yield ProcessedAudioChunk(
audio=chunk_10ms,
timestamp_ms=timestamp_ms,
is_speech=None, # no VAD
)
timestamp_ms += ms_per_chunk
else:
# No chunking
yield ProcessedAudioChunk(
audio=chunk,
timestamp_ms=timestamp_ms,
is_speech=None, # no VAD
)
timestamp_ms += (len(chunk) // sample_width) // ms_per_sample
async def process_enhance_audio(
self,
audio_stream: AsyncIterable[bytes],
sample_rate: int = 16000,
sample_width: int = 2,
) -> AsyncGenerator[ProcessedAudioChunk, None]:
"""Split audio into 10 ms chunks and apply VAD/noise suppression/auto gain/volume transformation."""
assert self.audio_processor is not None
ms_per_sample = sample_rate // 1000
ms_per_chunk = (AUDIO_PROCESSOR_SAMPLES // sample_width) // ms_per_sample
timestamp_ms = 0
async for dirty_samples in audio_stream:
if self.audio_settings.volume_multiplier != 1.0:
# Static gain
dirty_samples = _multiply_volume(
dirty_samples, self.audio_settings.volume_multiplier
)
# Split into 10ms chunks for audio enhancements/VAD
for dirty_10ms_chunk in chunk_samples(
dirty_samples, AUDIO_PROCESSOR_BYTES, self.audio_processor_buffer
):
ap_result = self.audio_processor.Process10ms(dirty_10ms_chunk)
yield ProcessedAudioChunk(
audio=ap_result.audio,
timestamp_ms=timestamp_ms,
is_speech=ap_result.is_speech,
)
timestamp_ms += ms_per_chunk
def _multiply_volume(chunk: bytes, volume_multiplier: float) -> bytes:
"""Multiplies 16-bit PCM samples by a constant."""
def _clamp(val: float) -> float:
"""Clamp to signed 16-bit."""
return max(-32768, min(32767, val))
return array.array(
"h",
(int(_clamp(value * volume_multiplier)) for value in array.array("h", chunk)),
).tobytes()
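# Illustrative example of the clamping above (not part of the module): samples are
# scaled and then saturated at the signed 16-bit limits instead of wrapping around.
#
#     >>> array.array("h", _multiply_volume(array.array("h", [1000, 30000]).tobytes(), 2.0)).tolist()
#     [2000, 32767]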
def _pipeline_debug_recording_thread_proc(
run_recording_dir: Path,
@ -1026,18 +1221,26 @@ class PipelineInput:
"""Run pipeline."""
self.run.start(device_id=self.device_id)
current_stage: PipelineStage | None = self.run.start_stage
stt_audio_buffer: list[bytes] = []
stt_audio_buffer: list[ProcessedAudioChunk] = []
stt_processed_stream: AsyncIterable[ProcessedAudioChunk] | None = None
if self.stt_stream is not None:
if self.run.audio_settings.needs_processor:
# VAD/noise suppression/auto gain/volume
stt_processed_stream = self.run.process_enhance_audio(self.stt_stream)
else:
# Volume multiplier only
stt_processed_stream = self.run.process_volume_only(self.stt_stream)
try:
if current_stage == PipelineStage.WAKE_WORD:
# wake-word-detection
assert self.stt_stream is not None
assert stt_processed_stream is not None
detect_result = await self.run.wake_word_detection(
self.stt_stream, stt_audio_buffer
stt_processed_stream, stt_audio_buffer
)
if detect_result is None:
# No wake word. Abort the rest of the pipeline.
await self.run.end()
return
current_stage = PipelineStage.STT
@ -1046,28 +1249,30 @@ class PipelineInput:
intent_input = self.intent_input
if current_stage == PipelineStage.STT:
assert self.stt_metadata is not None
assert self.stt_stream is not None
assert stt_processed_stream is not None
stt_stream = self.stt_stream
stt_input_stream = stt_processed_stream
if stt_audio_buffer:
# Send audio in the buffer first to speech-to-text, then move on to stt_stream.
# This is basically an async itertools.chain.
async def buffer_then_audio_stream() -> AsyncGenerator[bytes, None]:
async def buffer_then_audio_stream() -> AsyncGenerator[
ProcessedAudioChunk, None
]:
# Buffered audio
for chunk in stt_audio_buffer:
yield chunk
# Streamed audio
assert self.stt_stream is not None
async for chunk in self.stt_stream:
assert stt_processed_stream is not None
async for chunk in stt_processed_stream:
yield chunk
stt_stream = buffer_then_audio_stream()
stt_input_stream = buffer_then_audio_stream()
intent_input = await self.run.speech_to_text(
self.stt_metadata,
stt_stream,
stt_input_stream,
)
current_stage = PipelineStage.INTENT
@ -1362,13 +1567,46 @@ class PipelineStorageCollectionWebsocket(
connection.send_result(msg["id"])
@dataclass
class PipelineRuns:
"""Class managing pipelineruns."""
def __init__(self, pipeline_store: PipelineStorageCollection) -> None:
"""Initialize."""
self._pipeline_runs: dict[str, dict[str, PipelineRun]] = defaultdict(dict)
self._pipeline_store = pipeline_store
pipeline_store.async_add_listener(self._change_listener)
def add_run(self, pipeline_run: PipelineRun) -> None:
"""Add pipeline run."""
pipeline_id = pipeline_run.pipeline.id
self._pipeline_runs[pipeline_id][pipeline_run.id] = pipeline_run
def remove_run(self, pipeline_run: PipelineRun) -> None:
"""Remove pipeline run."""
pipeline_id = pipeline_run.pipeline.id
self._pipeline_runs[pipeline_id].pop(pipeline_run.id)
async def _change_listener(
self, change_type: str, item_id: str, change: dict
) -> None:
"""Handle pipeline store changes."""
if change_type != CHANGE_UPDATED:
return
if pipeline_runs := self._pipeline_runs.get(item_id):
# Create a temporary list in case the list is modified while we iterate
for pipeline_run in list(pipeline_runs.values()):
pipeline_run.abort_wake_word_detection = True
class PipelineData:
"""Store and debug data stored in hass.data."""
pipeline_runs: dict[str, LimitedSizeDict[str, PipelineRunDebug]]
pipeline_store: PipelineStorageCollection
pipeline_devices: set[str] = field(default_factory=set, init=False)
def __init__(self, pipeline_store: PipelineStorageCollection) -> None:
"""Initialize."""
self.pipeline_store = pipeline_store
self.pipeline_debug: dict[str, LimitedSizeDict[str, PipelineRunDebug]] = {}
self.pipeline_devices: set[str] = set()
self.pipeline_runs = PipelineRuns(pipeline_store)
@dataclass
@ -1382,11 +1620,35 @@ class PipelineRunDebug:
)
class PipelineStore(Store[SerializedPipelineStorageCollection]):
"""Store entity registry data."""
async def _async_migrate_func(
self,
old_major_version: int,
old_minor_version: int,
old_data: SerializedPipelineStorageCollection,
) -> SerializedPipelineStorageCollection:
"""Migrate to the new version."""
if old_major_version == 1 and old_minor_version < 2:
# Version 1.2 adds wake word configuration
for pipeline in old_data["items"]:
# Populate keys which were introduced before version 1.2
pipeline.setdefault("wake_word_entity", None)
pipeline.setdefault("wake_word_id", None)
if old_major_version > 1:
raise NotImplementedError
return old_data
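# Illustrative effect of the 1.x -> 1.2 migration above on one stored item
# (all other keys are untouched; the values are made up):
#
#     {"id": "abc123", "name": "My pipeline", "language": "en"}
# becomes
#     {"id": "abc123", "name": "My pipeline", "language": "en",
#      "wake_word_entity": None, "wake_word_id": None}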
@singleton(DOMAIN)
async def async_setup_pipeline_store(hass: HomeAssistant) -> PipelineData:
"""Set up the pipeline storage collection."""
pipeline_store = PipelineStorageCollection(
Store(hass, STORAGE_VERSION, STORAGE_KEY)
PipelineStore(
hass, STORAGE_VERSION, STORAGE_KEY, minor_version=STORAGE_VERSION_MINOR
)
)
await pipeline_store.async_load()
PipelineStorageCollectionWebsocket(
@ -1396,4 +1658,4 @@ async def async_setup_pipeline_store(hass: HomeAssistant) -> PipelineData:
PIPELINE_FIELDS,
PIPELINE_FIELDS,
).async_setup(hass)
return PipelineData({}, pipeline_store)
return PipelineData(pipeline_store)
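The chunking performed by process_volume_only above can be mimicked with a short, self-contained sketch. Assumptions: 16 kHz, 16-bit mono PCM and 10 ms chunks of 320 bytes (mirroring AUDIO_PROCESSOR_BYTES); the names below are illustrative and not part of the integration.
import asyncio
from collections.abc import AsyncIterable
from dataclasses import dataclass


@dataclass(frozen=True)
class Chunk:
    """Stand-in for ProcessedAudioChunk: 10 ms of PCM plus a timestamp."""

    audio: bytes
    timestamp_ms: int
    is_speech: bool | None


async def volume_only(stream: AsyncIterable[bytes], chunk_bytes: int = 320) -> AsyncIterable[Chunk]:
    """Split arbitrary PCM buffers into fixed 10 ms chunks, as process_volume_only does."""
    leftover = b""
    timestamp_ms = 0
    async for data in stream:
        data = leftover + data
        while len(data) >= chunk_bytes:
            yield Chunk(audio=data[:chunk_bytes], timestamp_ms=timestamp_ms, is_speech=None)
            data = data[chunk_bytes:]
            timestamp_ms += 10  # 320 bytes = 160 samples = 10 ms at 16 kHz
        leftover = data  # keep the partial chunk for the next incoming buffer


async def main() -> None:
    async def mic() -> AsyncIterable[bytes]:
        yield bytes(800)  # 25 ms of silence
        yield bytes(480)  # 15 ms more

    async for chunk in volume_only(mic()):
        print(chunk.timestamp_ms, len(chunk.audio))  # 0/10/20/30 ms, 320 bytes each


asyncio.run(main())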

View File

@ -1,12 +1,13 @@
"""Voice activity detection."""
from __future__ import annotations
from abc import ABC, abstractmethod
from collections.abc import Iterable
from dataclasses import dataclass, field
from dataclasses import dataclass
from enum import StrEnum
from typing import Final
from typing import Final, cast
import webrtcvad
from webrtc_noise_gain import AudioProcessor
_SAMPLE_RATE: Final = 16000 # Hz
_SAMPLE_WIDTH: Final = 2 # bytes
@ -32,6 +33,38 @@ class VadSensitivity(StrEnum):
return 1.0
class VoiceActivityDetector(ABC):
"""Base class for voice activity detectors (VAD)."""
@abstractmethod
def is_speech(self, chunk: bytes) -> bool:
"""Return True if audio chunk contains speech."""
@property
@abstractmethod
def samples_per_chunk(self) -> int | None:
"""Return number of samples per chunk or None if chunking is not required."""
class WebRtcVad(VoiceActivityDetector):
"""Voice activity detector based on webrtc."""
def __init__(self) -> None:
"""Initialize webrtcvad."""
# Just VAD: no noise suppression or auto gain
self._audio_processor = AudioProcessor(0, 0)
def is_speech(self, chunk: bytes) -> bool:
"""Return True if audio chunk contains speech."""
result = self._audio_processor.Process10ms(chunk)
return cast(bool, result.is_speech)
@property
def samples_per_chunk(self) -> int | None:
"""Return 10 ms."""
return int(0.01 * _SAMPLE_RATE) # 10 ms
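# Illustrative usage of the detector above (assuming webrtc_noise_gain is installed;
# the silence result is the typical outcome, not guaranteed):
#
#     vad = WebRtcVad()
#     assert vad.samples_per_chunk == 160  # 10 ms at 16 kHz
#     vad.is_speech(bytes(320))  # one 10 ms chunk of silence -> usually False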
class AudioBuffer:
"""Fixed-sized audio buffer with variable internal length."""
@ -73,13 +106,7 @@ class AudioBuffer:
@dataclass
class VoiceCommandSegmenter:
"""Segments an audio stream into voice commands using webrtcvad."""
vad_mode: int = 3
"""Aggressiveness in filtering out non-speech. 3 is the most aggressive."""
vad_samples_per_chunk: int = 480 # 30 ms
"""Must be 10, 20, or 30 ms at 16Khz."""
"""Segments an audio stream into voice commands."""
speech_seconds: float = 0.3
"""Seconds of speech before voice command has started."""
@ -108,85 +135,85 @@ class VoiceCommandSegmenter:
_reset_seconds_left: float = 0.0
"""Seconds left before resetting start/stop time counters."""
_vad: webrtcvad.Vad = None
_leftover_chunk_buffer: AudioBuffer = field(init=False)
_bytes_per_chunk: int = field(init=False)
_seconds_per_chunk: float = field(init=False)
def __post_init__(self) -> None:
"""Initialize VAD."""
self._vad = webrtcvad.Vad(self.vad_mode)
self._bytes_per_chunk = self.vad_samples_per_chunk * _SAMPLE_WIDTH
self._seconds_per_chunk = self.vad_samples_per_chunk / _SAMPLE_RATE
self._leftover_chunk_buffer = AudioBuffer(
self.vad_samples_per_chunk * _SAMPLE_WIDTH
)
"""Reset after initialization."""
self.reset()
def reset(self) -> None:
"""Reset all counters and state."""
self._leftover_chunk_buffer.clear()
self._speech_seconds_left = self.speech_seconds
self._silence_seconds_left = self.silence_seconds
self._timeout_seconds_left = self.timeout_seconds
self._reset_seconds_left = self.reset_seconds
self.in_command = False
def process(self, samples: bytes) -> bool:
"""Process 16-bit 16Khz mono audio samples.
def process(self, chunk_seconds: float, is_speech: bool | None) -> bool:
"""Process samples using external VAD.
Returns False when command is done.
"""
for chunk in chunk_samples(
samples, self._bytes_per_chunk, self._leftover_chunk_buffer
):
if not self._process_chunk(chunk):
self.reset()
return False
return True
@property
def audio_buffer(self) -> bytes:
"""Get partial chunk in the audio buffer."""
return self._leftover_chunk_buffer.bytes()
def _process_chunk(self, chunk: bytes) -> bool:
"""Process a single chunk of 16-bit 16Khz mono audio.
Returns False when command is done.
"""
is_speech = self._vad.is_speech(chunk, _SAMPLE_RATE)
self._timeout_seconds_left -= self._seconds_per_chunk
self._timeout_seconds_left -= chunk_seconds
if self._timeout_seconds_left <= 0:
self.reset()
return False
if not self.in_command:
if is_speech:
self._reset_seconds_left = self.reset_seconds
self._speech_seconds_left -= self._seconds_per_chunk
self._speech_seconds_left -= chunk_seconds
if self._speech_seconds_left <= 0:
# Inside voice command
self.in_command = True
else:
# Reset if enough silence
self._reset_seconds_left -= self._seconds_per_chunk
self._reset_seconds_left -= chunk_seconds
if self._reset_seconds_left <= 0:
self._speech_seconds_left = self.speech_seconds
elif not is_speech:
self._reset_seconds_left = self.reset_seconds
self._silence_seconds_left -= self._seconds_per_chunk
self._silence_seconds_left -= chunk_seconds
if self._silence_seconds_left <= 0:
self.reset()
return False
else:
# Reset if enough speech
self._reset_seconds_left -= self._seconds_per_chunk
self._reset_seconds_left -= chunk_seconds
if self._reset_seconds_left <= 0:
self._silence_seconds_left = self.silence_seconds
return True
def process_with_vad(
self,
chunk: bytes,
vad: VoiceActivityDetector,
leftover_chunk_buffer: AudioBuffer | None,
) -> bool:
"""Process an audio chunk using an external VAD.
A buffer is required if the VAD requires fixed-sized audio chunks (usually the case).
Returns False when voice command is finished.
"""
if vad.samples_per_chunk is None:
# No chunking
chunk_seconds = (len(chunk) // _SAMPLE_WIDTH) / _SAMPLE_RATE
is_speech = vad.is_speech(chunk)
return self.process(chunk_seconds, is_speech)
if leftover_chunk_buffer is None:
raise ValueError("leftover_chunk_buffer is required when vad uses chunking")
# With chunking
seconds_per_chunk = vad.samples_per_chunk / _SAMPLE_RATE
bytes_per_chunk = vad.samples_per_chunk * _SAMPLE_WIDTH
for vad_chunk in chunk_samples(chunk, bytes_per_chunk, leftover_chunk_buffer):
is_speech = vad.is_speech(vad_chunk)
if not self.process(seconds_per_chunk, is_speech):
return False
return True
@dataclass
class VoiceActivityTimeout:
@ -198,73 +225,43 @@ class VoiceActivityTimeout:
reset_seconds: float = 0.5
"""Seconds of speech before resetting timeout."""
vad_mode: int = 3
"""Aggressiveness in filtering out non-speech. 3 is the most aggressive."""
vad_samples_per_chunk: int = 480 # 30 ms
"""Must be 10, 20, or 30 ms at 16Khz."""
_silence_seconds_left: float = 0.0
"""Seconds left before considering voice command as stopped."""
_reset_seconds_left: float = 0.0
"""Seconds left before resetting start/stop time counters."""
_vad: webrtcvad.Vad = None
_leftover_chunk_buffer: AudioBuffer = field(init=False)
_bytes_per_chunk: int = field(init=False)
_seconds_per_chunk: float = field(init=False)
def __post_init__(self) -> None:
"""Initialize VAD."""
self._vad = webrtcvad.Vad(self.vad_mode)
self._bytes_per_chunk = self.vad_samples_per_chunk * _SAMPLE_WIDTH
self._seconds_per_chunk = self.vad_samples_per_chunk / _SAMPLE_RATE
self._leftover_chunk_buffer = AudioBuffer(
self.vad_samples_per_chunk * _SAMPLE_WIDTH
)
"""Reset after initialization."""
self.reset()
def reset(self) -> None:
"""Reset all counters and state."""
self._leftover_chunk_buffer.clear()
self._silence_seconds_left = self.silence_seconds
self._reset_seconds_left = self.reset_seconds
def process(self, samples: bytes) -> bool:
"""Process 16-bit 16Khz mono audio samples.
def process(self, chunk_seconds: float, is_speech: bool | None) -> bool:
"""Process samples using external VAD.
Returns False when timeout is reached.
"""
for chunk in chunk_samples(
samples, self._bytes_per_chunk, self._leftover_chunk_buffer
):
if not self._process_chunk(chunk):
return False
return True
def _process_chunk(self, chunk: bytes) -> bool:
"""Process a single chunk of 16-bit 16Khz mono audio.
Returns False when timeout is reached.
"""
if self._vad.is_speech(chunk, _SAMPLE_RATE):
if is_speech:
# Speech
self._reset_seconds_left -= self._seconds_per_chunk
self._reset_seconds_left -= chunk_seconds
if self._reset_seconds_left <= 0:
# Reset timeout
self._silence_seconds_left = self.silence_seconds
else:
# Silence
self._silence_seconds_left -= self._seconds_per_chunk
self._silence_seconds_left -= chunk_seconds
if self._silence_seconds_left <= 0:
# Timeout reached
self.reset()
return False
# Slowly build reset counter back up
self._reset_seconds_left = min(
self.reset_seconds, self._reset_seconds_left + self._seconds_per_chunk
self.reset_seconds, self._reset_seconds_left + chunk_seconds
)
return True
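Both classes above now consume pre-computed VAD results instead of running webrtcvad themselves. Below is a minimal sketch of the process(chunk_seconds, is_speech) contract; the helper is illustrative, and the wiring in the comment assumes the classes are importable from the assist_pipeline component (import path assumed).
def feed(process, speech_flags, chunk_seconds=0.01):
    """Feed 10 ms VAD flags into a segmenter/timeout style process() callable."""
    for index, is_speech in enumerate(speech_flags):
        if not process(chunk_seconds, is_speech):
            return index * chunk_seconds  # time at which it reported "done"
    return None  # still listening


# Example wiring (inside Home Assistant):
#
#     from homeassistant.components.assist_pipeline.vad import VoiceCommandSegmenter
#
#     segmenter = VoiceCommandSegmenter(speech_seconds=0.3, silence_seconds=0.5)
#     done_at = feed(segmenter.process, [True] * 40 + [False] * 60)  # roughly 0.9 seconds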

View File

@ -18,6 +18,7 @@ from homeassistant.util import language as language_util
from .const import DOMAIN
from .error import PipelineNotFound
from .pipeline import (
AudioSettings,
PipelineData,
PipelineError,
PipelineEvent,
@ -29,8 +30,8 @@ from .pipeline import (
async_get_pipeline,
)
DEFAULT_TIMEOUT = 30
DEFAULT_WAKE_WORD_TIMEOUT = 3
DEFAULT_TIMEOUT = 60 * 5 # seconds
DEFAULT_WAKE_WORD_TIMEOUT = 3 # seconds
_LOGGER = logging.getLogger(__name__)
@ -71,6 +72,13 @@ def async_register_websocket_api(hass: HomeAssistant) -> None:
vol.Optional("audio_seconds_to_buffer"): vol.Any(
float, int
),
# Audio enhancement
vol.Optional("noise_suppression_level"): int,
vol.Optional("auto_gain_dbfs"): int,
vol.Optional("volume_multiplier"): float,
# Advanced use cases/testing
vol.Optional("no_vad"): bool,
vol.Optional("no_chunking"): bool,
}
},
extra=vol.ALLOW_EXTRA,
@ -115,6 +123,7 @@ async def websocket_run(
handler_id: int | None = None
unregister_handler: Callable[[], None] | None = None
wake_word_settings: WakeWordSettings | None = None
audio_settings: AudioSettings | None = None
# Arguments to PipelineInput
input_args: dict[str, Any] = {
@ -124,13 +133,14 @@ async def websocket_run(
if start_stage in (PipelineStage.WAKE_WORD, PipelineStage.STT):
# Audio pipeline that will receive audio as binary websocket messages
msg_input = msg["input"]
audio_queue: asyncio.Queue[bytes] = asyncio.Queue()
incoming_sample_rate = msg["input"]["sample_rate"]
incoming_sample_rate = msg_input["sample_rate"]
if start_stage == PipelineStage.WAKE_WORD:
wake_word_settings = WakeWordSettings(
timeout=msg["input"].get("timeout", DEFAULT_WAKE_WORD_TIMEOUT),
audio_seconds_to_buffer=msg["input"].get("audio_seconds_to_buffer", 0),
audio_seconds_to_buffer=msg_input.get("audio_seconds_to_buffer", 0),
)
async def stt_stream() -> AsyncGenerator[bytes, None]:
@ -166,6 +176,15 @@ async def websocket_run(
channel=stt.AudioChannels.CHANNEL_MONO,
)
input_args["stt_stream"] = stt_stream()
# Audio settings
audio_settings = AudioSettings(
noise_suppression_level=msg_input.get("noise_suppression_level", 0),
auto_gain_dbfs=msg_input.get("auto_gain_dbfs", 0),
volume_multiplier=msg_input.get("volume_multiplier", 1.0),
is_vad_enabled=not msg_input.get("no_vad", False),
is_chunking_enabled=not msg_input.get("no_chunking", False),
)
elif start_stage == PipelineStage.INTENT:
# Input to conversation agent
input_args["intent_input"] = msg["input"]["text"]
@ -185,6 +204,7 @@ async def websocket_run(
"timeout": timeout,
},
wake_word_settings=wake_word_settings,
audio_settings=audio_settings or AudioSettings(),
)
pipeline_input = PipelineInput(**input_args)
@ -238,18 +258,18 @@ def websocket_list_runs(
pipeline_data: PipelineData = hass.data[DOMAIN]
pipeline_id = msg["pipeline_id"]
if pipeline_id not in pipeline_data.pipeline_runs:
if pipeline_id not in pipeline_data.pipeline_debug:
connection.send_result(msg["id"], {"pipeline_runs": []})
return
pipeline_runs = pipeline_data.pipeline_runs[pipeline_id]
pipeline_debug = pipeline_data.pipeline_debug[pipeline_id]
connection.send_result(
msg["id"],
{
"pipeline_runs": [
{"pipeline_run_id": id, "timestamp": pipeline_run.timestamp}
for id, pipeline_run in pipeline_runs.items()
for id, pipeline_run in pipeline_debug.items()
]
},
)
@ -274,7 +294,7 @@ def websocket_get_run(
pipeline_id = msg["pipeline_id"]
pipeline_run_id = msg["pipeline_run_id"]
if pipeline_id not in pipeline_data.pipeline_runs:
if pipeline_id not in pipeline_data.pipeline_debug:
connection.send_error(
msg["id"],
websocket_api.const.ERR_NOT_FOUND,
@ -282,9 +302,9 @@ def websocket_get_run(
)
return
pipeline_runs = pipeline_data.pipeline_runs[pipeline_id]
pipeline_debug = pipeline_data.pipeline_debug[pipeline_id]
if pipeline_run_id not in pipeline_runs:
if pipeline_run_id not in pipeline_debug:
connection.send_error(
msg["id"],
websocket_api.const.ERR_NOT_FOUND,
@ -294,7 +314,7 @@ def websocket_get_run(
connection.send_result(
msg["id"],
{"events": pipeline_runs[pipeline_run_id].events},
{"events": pipeline_debug[pipeline_run_id].events},
)
@ -332,7 +352,7 @@ async def websocket_list_languages(
dialect = language_util.Dialect.parse(language_tag)
languages.add(dialect.language)
if pipeline_languages is not None:
pipeline_languages &= languages
pipeline_languages = language_util.intersect(pipeline_languages, languages)
else:
pipeline_languages = languages
@ -342,11 +362,15 @@ async def websocket_list_languages(
dialect = language_util.Dialect.parse(language_tag)
languages.add(dialect.language)
if pipeline_languages is not None:
pipeline_languages &= languages
pipeline_languages = language_util.intersect(pipeline_languages, languages)
else:
pipeline_languages = languages
connection.send_result(
msg["id"],
{"languages": pipeline_languages},
{
"languages": sorted(pipeline_languages)
if pipeline_languages
else pipeline_languages
},
)
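For reference, a hedged example of a run message exercising the new audio options. The field names match the schema additions above; the command name, message envelope, and stage values are assumptions, not shown in this diff.
run_message = {
    "id": 1,
    "type": "assist_pipeline/run",  # assumed command name
    "start_stage": "wake_word",
    "end_stage": "tts",  # assumed stage value
    "input": {
        "sample_rate": 16000,
        "timeout": 3,  # wake word timeout (seconds)
        "audio_seconds_to_buffer": 0.5,
        # Audio enhancement
        "noise_suppression_level": 2,  # validated to [0, 4]
        "auto_gain_dbfs": 10,  # validated to [0, 31]
        "volume_multiplier": 2.0,
        # Advanced use cases/testing
        "no_vad": False,
        "no_chunking": False,
    },
}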

View File

@ -5,5 +5,5 @@
"documentation": "https://www.home-assistant.io/integrations/asterisk_mbox",
"iot_class": "local_push",
"loggers": ["asterisk_mbox"],
"requirements": ["asterisk-mbox==0.5.0"]
"requirements": ["asterisk_mbox==0.5.0"]
}

View File

@ -26,12 +26,16 @@ DOMAIN = "august"
OPERATION_METHOD_AUTORELOCK = "autorelock"
OPERATION_METHOD_REMOTE = "remote"
OPERATION_METHOD_KEYPAD = "keypad"
OPERATION_METHOD_MANUAL = "manual"
OPERATION_METHOD_TAG = "tag"
OPERATION_METHOD_MOBILE_DEVICE = "mobile"
ATTR_OPERATION_AUTORELOCK = "autorelock"
ATTR_OPERATION_METHOD = "method"
ATTR_OPERATION_REMOTE = "remote"
ATTR_OPERATION_KEYPAD = "keypad"
ATTR_OPERATION_MANUAL = "manual"
ATTR_OPERATION_TAG = "tag"
# Limit battery, online, and hardware updates to hourly
# in order to reduce the number of api requests and

View File

@ -28,5 +28,5 @@
"documentation": "https://www.home-assistant.io/integrations/august",
"iot_class": "cloud_push",
"loggers": ["pubnub", "yalexs"],
"requirements": ["yalexs==1.9.0", "yalexs-ble==2.3.0"]
"requirements": ["yalexs==1.10.0", "yalexs-ble==2.3.0"]
}

View File

@ -33,13 +33,17 @@ from . import AugustData
from .const import (
ATTR_OPERATION_AUTORELOCK,
ATTR_OPERATION_KEYPAD,
ATTR_OPERATION_MANUAL,
ATTR_OPERATION_METHOD,
ATTR_OPERATION_REMOTE,
ATTR_OPERATION_TAG,
DOMAIN,
OPERATION_METHOD_AUTORELOCK,
OPERATION_METHOD_KEYPAD,
OPERATION_METHOD_MANUAL,
OPERATION_METHOD_MOBILE_DEVICE,
OPERATION_METHOD_REMOTE,
OPERATION_METHOD_TAG,
)
from .entity import AugustEntityMixin
@ -183,6 +187,8 @@ class AugustOperatorSensor(AugustEntityMixin, RestoreEntity, SensorEntity):
self._device = device
self._operated_remote = None
self._operated_keypad = None
self._operated_manual = None
self._operated_tag = None
self._operated_autorelock = None
self._operated_time = None
self._attr_unique_id = f"{self._device_id}_lock_operator"
@ -200,6 +206,8 @@ class AugustOperatorSensor(AugustEntityMixin, RestoreEntity, SensorEntity):
self._attr_native_value = lock_activity.operated_by
self._operated_remote = lock_activity.operated_remote
self._operated_keypad = lock_activity.operated_keypad
self._operated_manual = lock_activity.operated_manual
self._operated_tag = lock_activity.operated_tag
self._operated_autorelock = lock_activity.operated_autorelock
self._attr_entity_picture = lock_activity.operator_thumbnail_url
@ -212,6 +220,10 @@ class AugustOperatorSensor(AugustEntityMixin, RestoreEntity, SensorEntity):
attributes[ATTR_OPERATION_REMOTE] = self._operated_remote
if self._operated_keypad is not None:
attributes[ATTR_OPERATION_KEYPAD] = self._operated_keypad
if self._operated_manual is not None:
attributes[ATTR_OPERATION_MANUAL] = self._operated_manual
if self._operated_tag is not None:
attributes[ATTR_OPERATION_TAG] = self._operated_tag
if self._operated_autorelock is not None:
attributes[ATTR_OPERATION_AUTORELOCK] = self._operated_autorelock
@ -219,6 +231,10 @@ class AugustOperatorSensor(AugustEntityMixin, RestoreEntity, SensorEntity):
attributes[ATTR_OPERATION_METHOD] = OPERATION_METHOD_REMOTE
elif self._operated_keypad:
attributes[ATTR_OPERATION_METHOD] = OPERATION_METHOD_KEYPAD
elif self._operated_manual:
attributes[ATTR_OPERATION_METHOD] = OPERATION_METHOD_MANUAL
elif self._operated_tag:
attributes[ATTR_OPERATION_METHOD] = OPERATION_METHOD_TAG
elif self._operated_autorelock:
attributes[ATTR_OPERATION_METHOD] = OPERATION_METHOD_AUTORELOCK
else:
@ -241,6 +257,10 @@ class AugustOperatorSensor(AugustEntityMixin, RestoreEntity, SensorEntity):
self._operated_remote = last_state.attributes[ATTR_OPERATION_REMOTE]
if ATTR_OPERATION_KEYPAD in last_state.attributes:
self._operated_keypad = last_state.attributes[ATTR_OPERATION_KEYPAD]
if ATTR_OPERATION_MANUAL in last_state.attributes:
self._operated_manual = last_state.attributes[ATTR_OPERATION_MANUAL]
if ATTR_OPERATION_TAG in last_state.attributes:
self._operated_tag = last_state.attributes[ATTR_OPERATION_TAG]
if ATTR_OPERATION_AUTORELOCK in last_state.attributes:
self._operated_autorelock = last_state.attributes[ATTR_OPERATION_AUTORELOCK]

View File

@ -5,7 +5,7 @@ import logging
from auroranoaa import AuroraForecast
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_LATITUDE, CONF_LONGITUDE, CONF_NAME, Platform
from homeassistant.const import CONF_LATITUDE, CONF_LONGITUDE, Platform
from homeassistant.core import HomeAssistant
from homeassistant.helpers import aiohttp_client
@ -29,11 +29,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
longitude = conf[CONF_LONGITUDE]
latitude = conf[CONF_LATITUDE]
threshold = options.get(CONF_THRESHOLD, DEFAULT_THRESHOLD)
name = conf[CONF_NAME]
coordinator = AuroraDataUpdateCoordinator(
hass=hass,
name=name,
api=api,
latitude=latitude,
longitude=longitude,

View File

@ -1,4 +1,4 @@
"""Config flow for SpaceX Launches and Starman."""
"""Config flow for Aurora."""
from __future__ import annotations
import logging
@ -8,7 +8,7 @@ from auroranoaa import AuroraForecast
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.const import CONF_LATITUDE, CONF_LONGITUDE, CONF_NAME
from homeassistant.const import CONF_LATITUDE, CONF_LONGITUDE
from homeassistant.core import callback
from homeassistant.helpers import aiohttp_client, config_validation as cv
from homeassistant.helpers.schema_config_entry_flow import (
@ -16,7 +16,7 @@ from homeassistant.helpers.schema_config_entry_flow import (
SchemaOptionsFlowHandler,
)
from .const import CONF_THRESHOLD, DEFAULT_NAME, DEFAULT_THRESHOLD, DOMAIN
from .const import CONF_THRESHOLD, DEFAULT_THRESHOLD, DOMAIN
_LOGGER = logging.getLogger(__name__)
@ -50,7 +50,6 @@ class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
errors = {}
if user_input is not None:
name = user_input[CONF_NAME]
longitude = user_input[CONF_LONGITUDE]
latitude = user_input[CONF_LATITUDE]
@ -70,7 +69,7 @@ class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
)
self._abort_if_unique_id_configured()
return self.async_create_entry(
title=f"Aurora - {name}", data=user_input
title="Aurora visibility", data=user_input
)
return self.async_show_form(
@ -78,13 +77,11 @@ class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
data_schema=self.add_suggested_values_to_schema(
vol.Schema(
{
vol.Required(CONF_NAME): str,
vol.Required(CONF_LONGITUDE): cv.longitude,
vol.Required(CONF_LATITUDE): cv.latitude,
}
),
{
CONF_NAME: DEFAULT_NAME,
CONF_LONGITUDE: self.hass.config.longitude,
CONF_LATITUDE: self.hass.config.latitude,
},

View File

@ -6,4 +6,3 @@ AURORA_API = "aurora_api"
CONF_THRESHOLD = "forecast_threshold"
DEFAULT_THRESHOLD = 75
ATTRIBUTION = "Data provided by the National Oceanic and Atmospheric Administration"
DEFAULT_NAME = "Aurora Visibility"

View File

@ -18,7 +18,6 @@ class AuroraDataUpdateCoordinator(DataUpdateCoordinator):
def __init__(
self,
hass: HomeAssistant,
name: str,
api: AuroraForecast,
latitude: float,
longitude: float,
@ -29,12 +28,11 @@ class AuroraDataUpdateCoordinator(DataUpdateCoordinator):
super().__init__(
hass=hass,
logger=_LOGGER,
name=name,
name="Aurora",
update_interval=timedelta(minutes=5),
)
self.api = api
self.name = name
self.latitude = int(latitude)
self.longitude = int(longitude)
self.threshold = int(threshold)

View File

@ -29,14 +29,9 @@ class AuroraEntity(CoordinatorEntity[AuroraDataUpdateCoordinator]):
self._attr_translation_key = translation_key
self._attr_unique_id = f"{coordinator.latitude}_{coordinator.longitude}"
self._attr_icon = icon
@property
def device_info(self) -> DeviceInfo:
"""Define the device based on name."""
return DeviceInfo(
self._attr_device_info = DeviceInfo(
entry_type=DeviceEntryType.SERVICE,
identifiers={(DOMAIN, str(self.unique_id))},
identifiers={(DOMAIN, self._attr_unique_id)},
manufacturer="NOAA",
model="Aurora Visibility Sensor",
name=self.coordinator.name,
)

View File

@ -3,11 +3,10 @@ from __future__ import annotations
from datetime import timedelta
import logging
from typing import Any
from aiohttp import ClientError
from aussiebb.asyncio import AussieBB
from aussiebb.const import FETCH_TYPES, NBN_TYPES, PHONE_TYPES
from aussiebb.const import FETCH_TYPES
from aussiebb.exceptions import AuthenticationException, UnrecognisedServiceType
from homeassistant.config_entries import ConfigEntry
@ -23,19 +22,6 @@ _LOGGER = logging.getLogger(__name__)
PLATFORMS = [Platform.SENSOR]
# Backport for the pyaussiebb=0.0.15 validate_service_type method
def validate_service_type(service: dict[str, Any]) -> None:
"""Check the service types against known types."""
if "type" not in service:
raise ValueError("Field 'type' not found in service data")
if service["type"] not in NBN_TYPES + PHONE_TYPES + ["Hardware"]:
raise UnrecognisedServiceType(
f"Service type {service['type']=} {service['name']=} - not recognised - ",
"please report this at https://github.com/yaleman/aussiebb/issues/new",
)
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Set up Aussie Broadband from a config entry."""
# Login to the Aussie Broadband API and retrieve the current service list
@ -44,9 +30,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
entry.data[CONF_PASSWORD],
async_get_clientsession(hass),
)
# Overwrite the pyaussiebb=0.0.15 validate_service_type method with backport
# Required until pydantic 2.x is supported
client.validate_service_type = validate_service_type
try:
await client.login()
services = await client.get_services(drop_types=FETCH_TYPES)
@ -61,10 +45,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
try:
return await client.get_usage(service_id)
except UnrecognisedServiceType as err:
raise UpdateFailed(
f"Service {service_id} of type '{services[service_id]['type']}' was"
" unrecognised"
) from err
raise UpdateFailed(f"Service {service_id} was unrecognised") from err
return async_update_data

View File

@ -57,9 +57,6 @@ from homeassistant.helpers import condition
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import ToggleEntity
from homeassistant.helpers.entity_component import EntityComponent
from homeassistant.helpers.integration_platform import (
async_process_integration_platform_for_component,
)
from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue
from homeassistant.helpers.restore_state import RestoreEntity
from homeassistant.helpers.script import (
@ -249,10 +246,6 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
LOGGER, DOMAIN, hass
)
# Process integration platforms right away since
# we will create entities before firing EVENT_COMPONENT_LOADED
await async_process_integration_platform_for_component(hass, DOMAIN)
# Register automation as valid domain for Blueprint
async_get_blueprints(hass)
@ -314,6 +307,9 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
class BaseAutomationEntity(ToggleEntity, ABC):
"""Base class for automation entities."""
_entity_component_unrecorded_attributes = frozenset(
(ATTR_LAST_TRIGGERED, ATTR_MODE, ATTR_CUR, ATTR_MAX, CONF_ID)
)
raw_config: ConfigType | None
@property

View File

@ -9,8 +9,9 @@ blueprint:
name: Motion Sensor
selector:
entity:
domain: binary_sensor
device_class: motion
filter:
device_class: motion
domain: binary_sensor
light_target:
name: Light
selector:

View File

@ -9,18 +9,21 @@ blueprint:
name: Person
selector:
entity:
domain: person
filter:
domain: person
zone_entity:
name: Zone
selector:
entity:
domain: zone
filter:
domain: zone
notify_device:
name: Device to notify
description: Device needs to run the official Home Assistant app to receive notifications.
selector:
device:
integration: mobile_app
filter:
integration: mobile_app
trigger:
platform: state

View File

@ -1,12 +0,0 @@
"""Integration platform for recorder."""
from __future__ import annotations
from homeassistant.core import HomeAssistant, callback
from . import ATTR_CUR, ATTR_LAST_TRIGGERED, ATTR_MAX, ATTR_MODE, CONF_ID
@callback
def exclude_attributes(hass: HomeAssistant) -> set[str]:
"""Exclude extra attributes from being recorded in the database."""
return {ATTR_LAST_TRIGGERED, ATTR_MODE, ATTR_CUR, ATTR_MAX, CONF_ID}

View File

@ -1,29 +1,16 @@
"""The awair component."""
from __future__ import annotations
from asyncio import gather, timeout
from dataclasses import dataclass
from datetime import timedelta
from aiohttp import ClientSession
from python_awair import Awair, AwairLocal
from python_awair.air_data import AirData
from python_awair.devices import AwairBaseDevice, AwairLocalDevice
from python_awair.exceptions import AuthError, AwairError
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_ACCESS_TOKEN, CONF_HOST, Platform
from homeassistant.const import CONF_HOST, Platform
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from .const import (
API_TIMEOUT,
DOMAIN,
LOGGER,
UPDATE_INTERVAL_CLOUD,
UPDATE_INTERVAL_LOCAL,
from .const import DOMAIN
from .coordinator import (
AwairCloudDataUpdateCoordinator,
AwairDataUpdateCoordinator,
AwairLocalDataUpdateCoordinator,
)
PLATFORMS = [Platform.SENSOR]
@ -70,93 +57,3 @@ async def async_unload_entry(hass: HomeAssistant, config_entry: ConfigEntry) ->
hass.data[DOMAIN].pop(config_entry.entry_id)
return unload_ok
@dataclass
class AwairResult:
"""Wrapper class to hold an awair device and set of air data."""
device: AwairBaseDevice
air_data: AirData
class AwairDataUpdateCoordinator(DataUpdateCoordinator[dict[str, AwairResult]]):
"""Define a wrapper class to update Awair data."""
def __init__(
self,
hass: HomeAssistant,
config_entry: ConfigEntry,
update_interval: timedelta | None,
) -> None:
"""Set up the AwairDataUpdateCoordinator class."""
self._config_entry = config_entry
self.title = config_entry.title
super().__init__(hass, LOGGER, name=DOMAIN, update_interval=update_interval)
async def _fetch_air_data(self, device: AwairBaseDevice) -> AwairResult:
"""Fetch latest air quality data."""
LOGGER.debug("Fetching data for %s", device.uuid)
air_data = await device.air_data_latest()
LOGGER.debug(air_data)
return AwairResult(device=device, air_data=air_data)
class AwairCloudDataUpdateCoordinator(AwairDataUpdateCoordinator):
"""Define a wrapper class to update Awair data from Cloud API."""
def __init__(
self, hass: HomeAssistant, config_entry: ConfigEntry, session: ClientSession
) -> None:
"""Set up the AwairCloudDataUpdateCoordinator class."""
access_token = config_entry.data[CONF_ACCESS_TOKEN]
self._awair = Awair(access_token=access_token, session=session)
super().__init__(hass, config_entry, UPDATE_INTERVAL_CLOUD)
async def _async_update_data(self) -> dict[str, AwairResult]:
"""Update data via Awair client library."""
async with timeout(API_TIMEOUT):
try:
LOGGER.debug("Fetching users and devices")
user = await self._awair.user()
devices = await user.devices()
results = await gather(
*(self._fetch_air_data(device) for device in devices)
)
return {result.device.uuid: result for result in results}
except AuthError as err:
raise ConfigEntryAuthFailed from err
except Exception as err:
raise UpdateFailed(err) from err
class AwairLocalDataUpdateCoordinator(AwairDataUpdateCoordinator):
"""Define a wrapper class to update Awair data from the local API."""
_device: AwairLocalDevice | None = None
def __init__(
self, hass: HomeAssistant, config_entry: ConfigEntry, session: ClientSession
) -> None:
"""Set up the AwairLocalDataUpdateCoordinator class."""
self._awair = AwairLocal(
session=session, device_addrs=[config_entry.data[CONF_HOST]]
)
super().__init__(hass, config_entry, UPDATE_INTERVAL_LOCAL)
async def _async_update_data(self) -> dict[str, AwairResult]:
"""Update data via Awair client library."""
async with timeout(API_TIMEOUT):
try:
if self._device is None:
LOGGER.debug("Fetching devices")
devices = await self._awair.devices()
self._device = devices[0]
result = await self._fetch_air_data(self._device)
return {result.device.uuid: result}
except AwairError as err:
LOGGER.error("Unexpected API error: %s", err)
raise UpdateFailed(err) from err

View File

@ -0,0 +1,116 @@
"""DataUpdateCoordinators for awair integration."""
from __future__ import annotations
from asyncio import gather, timeout
from dataclasses import dataclass
from datetime import timedelta
from aiohttp import ClientSession
from python_awair import Awair, AwairLocal
from python_awair.air_data import AirData
from python_awair.devices import AwairBaseDevice, AwairLocalDevice
from python_awair.exceptions import AuthError, AwairError
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_ACCESS_TOKEN, CONF_HOST
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from .const import (
API_TIMEOUT,
DOMAIN,
LOGGER,
UPDATE_INTERVAL_CLOUD,
UPDATE_INTERVAL_LOCAL,
)
@dataclass
class AwairResult:
"""Wrapper class to hold an awair device and set of air data."""
device: AwairBaseDevice
air_data: AirData
class AwairDataUpdateCoordinator(DataUpdateCoordinator[dict[str, AwairResult]]):
"""Define a wrapper class to update Awair data."""
def __init__(
self,
hass: HomeAssistant,
config_entry: ConfigEntry,
update_interval: timedelta | None,
) -> None:
"""Set up the AwairDataUpdateCoordinator class."""
self._config_entry = config_entry
self.title = config_entry.title
super().__init__(hass, LOGGER, name=DOMAIN, update_interval=update_interval)
async def _fetch_air_data(self, device: AwairBaseDevice) -> AwairResult:
"""Fetch latest air quality data."""
LOGGER.debug("Fetching data for %s", device.uuid)
air_data = await device.air_data_latest()
LOGGER.debug(air_data)
return AwairResult(device=device, air_data=air_data)
class AwairCloudDataUpdateCoordinator(AwairDataUpdateCoordinator):
"""Define a wrapper class to update Awair data from Cloud API."""
def __init__(
self, hass: HomeAssistant, config_entry: ConfigEntry, session: ClientSession
) -> None:
"""Set up the AwairCloudDataUpdateCoordinator class."""
access_token = config_entry.data[CONF_ACCESS_TOKEN]
self._awair = Awair(access_token=access_token, session=session)
super().__init__(hass, config_entry, UPDATE_INTERVAL_CLOUD)
async def _async_update_data(self) -> dict[str, AwairResult]:
"""Update data via Awair client library."""
async with timeout(API_TIMEOUT):
try:
LOGGER.debug("Fetching users and devices")
user = await self._awair.user()
devices = await user.devices()
results = await gather(
*(self._fetch_air_data(device) for device in devices)
)
return {result.device.uuid: result for result in results}
except AuthError as err:
raise ConfigEntryAuthFailed from err
except Exception as err:
raise UpdateFailed(err) from err
class AwairLocalDataUpdateCoordinator(AwairDataUpdateCoordinator):
"""Define a wrapper class to update Awair data from the local API."""
_device: AwairLocalDevice | None = None
def __init__(
self, hass: HomeAssistant, config_entry: ConfigEntry, session: ClientSession
) -> None:
"""Set up the AwairLocalDataUpdateCoordinator class."""
self._awair = AwairLocal(
session=session, device_addrs=[config_entry.data[CONF_HOST]]
)
super().__init__(hass, config_entry, UPDATE_INTERVAL_LOCAL)
async def _async_update_data(self) -> dict[str, AwairResult]:
"""Update data via Awair client library."""
async with timeout(API_TIMEOUT):
try:
if self._device is None:
LOGGER.debug("Fetching devices")
devices = await self._awair.devices()
self._device = devices[0]
result = await self._fetch_air_data(self._device)
return {result.device.uuid: result}
except AwairError as err:
LOGGER.error("Unexpected API error: %s", err)
raise UpdateFailed(err) from err

View File

@ -31,7 +31,6 @@ from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from . import AwairDataUpdateCoordinator, AwairResult
from .const import (
API_CO2,
API_DUST,
@ -46,6 +45,7 @@ from .const import (
ATTRIBUTION,
DOMAIN,
)
from .coordinator import AwairDataUpdateCoordinator, AwairResult
DUST_ALIASES = [API_PM25, API_PM10]

View File

@ -14,7 +14,6 @@ from homeassistant import config_entries
from homeassistant.components import zeroconf
from homeassistant.const import CONF_IP_ADDRESS
from homeassistant.data_entry_flow import FlowResult
from homeassistant.util.network import is_ipv6_address
from .const import DOMAIN, RUN_TIMEOUT
from .models import BAFDiscovery
@ -49,10 +48,10 @@ class BAFFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
self, discovery_info: zeroconf.ZeroconfServiceInfo
) -> FlowResult:
"""Handle zeroconf discovery."""
if discovery_info.ip_address.version == 6:
return self.async_abort(reason="ipv6_not_supported")
properties = discovery_info.properties
ip_address = discovery_info.host
if is_ipv6_address(ip_address):
return self.async_abort(reason="ipv6_not_supported")
uuid = properties["uuid"]
model = properties["model"]
name = properties["name"]

View File

@ -59,7 +59,7 @@ def validate_input(auth: Auth) -> None:
raise Require2FA
def _send_blink_2fa_pin(auth: Auth, pin: str) -> bool:
def _send_blink_2fa_pin(auth: Auth, pin: str | None) -> bool:
"""Send 2FA pin to blink servers."""
blink = Blink()
blink.auth = auth
@ -122,8 +122,9 @@ class BlinkConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle 2FA step."""
errors = {}
if user_input is not None:
pin = user_input.get(CONF_PIN)
pin: str | None = user_input.get(CONF_PIN)
try:
assert self.auth
valid_token = await self.hass.async_add_executor_job(
_send_blink_2fa_pin, self.auth, pin
)

View File

@ -100,6 +100,7 @@ class BloomSkySensor(SensorEntity):
self._sensor_name = sensor_name
self._attr_name = f"{device['DeviceName']} {sensor_name}"
self._attr_unique_id = f"{self._device_id}-{sensor_name}"
self._attr_device_class = SENSOR_DEVICE_CLASS.get(sensor_name)
self._attr_native_unit_of_measurement = SENSOR_UNITS_IMPERIAL.get(
sensor_name, None
)
@ -108,11 +109,6 @@ class BloomSkySensor(SensorEntity):
sensor_name, None
)
@property
def device_class(self) -> SensorDeviceClass | None:
"""Return the class of this device, from component DEVICE_CLASSES."""
return SENSOR_DEVICE_CLASS.get(self._sensor_name)
def update(self) -> None:
"""Request an update from the BloomSky API."""
self._bloomsky.refresh_devices()

View File

@ -45,6 +45,8 @@ from .api import (
async_ble_device_from_address,
async_discovered_service_info,
async_get_advertisement_callback,
async_get_fallback_availability_interval,
async_get_learned_advertising_interval,
async_get_scanner,
async_last_service_info,
async_process_advertisements,
@ -54,6 +56,7 @@ from .api import (
async_scanner_by_source,
async_scanner_count,
async_scanner_devices_by_address,
async_set_fallback_availability_interval,
async_track_unavailable,
)
from .base_scanner import BaseHaRemoteScanner, BaseHaScanner, BluetoothScannerDevice
@ -86,12 +89,15 @@ __all__ = [
"async_address_present",
"async_ble_device_from_address",
"async_discovered_service_info",
"async_get_fallback_availability_interval",
"async_get_learned_advertising_interval",
"async_get_scanner",
"async_last_service_info",
"async_process_advertisements",
"async_rediscover_address",
"async_register_callback",
"async_register_scanner",
"async_set_fallback_availability_interval",
"async_track_unavailable",
"async_scanner_by_source",
"async_scanner_count",

View File

@ -110,7 +110,7 @@ class ActiveBluetoothDataUpdateCoordinator(
return False
poll_age: float | None = None
if self._last_poll:
poll_age = monotonic_time_coarse() - self._last_poll
poll_age = service_info.time - self._last_poll
return self._needs_poll_method(service_info, poll_age)
async def _async_poll_data(

View File

@ -103,7 +103,7 @@ class ActiveBluetoothProcessorCoordinator(
return False
poll_age: float | None = None
if self._last_poll:
poll_age = monotonic_time_coarse() - self._last_poll
poll_age = service_info.time - self._last_poll
return self._needs_poll_method(service_info, poll_age)
async def _async_poll_data(

View File

@ -138,7 +138,7 @@ async def async_process_advertisements(
timeout: int,
) -> BluetoothServiceInfoBleak:
"""Process advertisements until callback returns true or timeout expires."""
done: Future[BluetoothServiceInfoBleak] = Future()
done: Future[BluetoothServiceInfoBleak] = hass.loop.create_future()
@hass_callback
def _async_discovered_device(
@ -197,3 +197,27 @@ def async_get_advertisement_callback(
) -> Callable[[BluetoothServiceInfoBleak], None]:
"""Get the advertisement callback."""
return _get_manager(hass).scanner_adv_received
@hass_callback
def async_get_learned_advertising_interval(
hass: HomeAssistant, address: str
) -> float | None:
"""Get the learned advertising interval for a MAC address."""
return _get_manager(hass).async_get_learned_advertising_interval(address)
@hass_callback
def async_get_fallback_availability_interval(
hass: HomeAssistant, address: str
) -> float | None:
"""Get the fallback availability timeout for a MAC address."""
return _get_manager(hass).async_get_fallback_availability_interval(address)
@hass_callback
def async_set_fallback_availability_interval(
hass: HomeAssistant, address: str, interval: float
) -> None:
"""Override the fallback availability timeout for a MAC address."""
_get_manager(hass).async_set_fallback_availability_interval(address, interval)
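A short sketch of how an integration might use the new helpers (the address and the 90-second interval are made-up values; the calls must run in the event loop):
from homeassistant.components import bluetooth
from homeassistant.core import HomeAssistant, callback


@callback
def tune_availability(hass: HomeAssistant) -> None:
    """Fall back to a longer availability window for a slowly advertising device."""
    address = "AA:BB:CC:DD:EE:FF"
    learned = bluetooth.async_get_learned_advertising_interval(hass, address)
    fallback = bluetooth.async_get_fallback_availability_interval(hass, address)
    if learned is None and fallback is None:
        bluetooth.async_set_fallback_availability_interval(hass, address, 90.0)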

View File

@ -131,6 +131,9 @@ class BaseHaScanner(ABC):
self.name,
SCANNER_WATCHDOG_TIMEOUT,
)
self.scanning = False
return
self.scanning = not self._connecting
@contextmanager
def connecting(self) -> Generator[None, None, None]:
@ -302,6 +305,7 @@ class BaseHaRemoteScanner(BaseHaScanner):
advertisement_monotonic_time: float,
) -> None:
"""Call the registered callback."""
self.scanning = not self._connecting
self._last_detection = advertisement_monotonic_time
try:
prev_discovery = self._discovered_device_advertisement_datas[address]

View File

@ -18,7 +18,7 @@ from bluetooth_adapters import (
)
from homeassistant import config_entries
from homeassistant.components.logger import EVENT_LOGGING_CHANGED
from homeassistant.const import EVENT_LOGGING_CHANGED
from homeassistant.core import (
CALLBACK_TYPE,
Event,
@ -108,6 +108,7 @@ class BluetoothManager:
"_cancel_unavailable_tracking",
"_cancel_logging_listener",
"_advertisement_tracker",
"_fallback_intervals",
"_unavailable_callbacks",
"_connectable_unavailable_callbacks",
"_callback_index",
@ -139,6 +140,7 @@ class BluetoothManager:
self._cancel_logging_listener: CALLBACK_TYPE | None = None
self._advertisement_tracker = AdvertisementTracker()
self._fallback_intervals: dict[str, float] = {}
self._unavailable_callbacks: dict[
str, list[Callable[[BluetoothServiceInfoBleak], None]]
@ -342,7 +344,9 @@ class BluetoothManager:
# since it may have gone to sleep and since we do not need an active
# connection to it we can only determine its availability
# by the lack of advertisements
if advertising_interval := intervals.get(address):
if advertising_interval := (
intervals.get(address) or self._fallback_intervals.get(address)
):
advertising_interval += TRACKER_BUFFERING_WOBBLE_SECONDS
else:
advertising_interval = (
@ -355,6 +359,7 @@ class BluetoothManager:
# The second loop (connectable=False) is responsible for removing
# the device from all the interval tracking since it is no longer
# available for both connectable and non-connectable
self._fallback_intervals.pop(address, None)
tracker.async_remove_address(address)
self._integration_matcher.async_clear_address(address)
self._async_dismiss_discoveries(address)
@ -386,7 +391,10 @@ class BluetoothManager:
"""Prefer previous advertisement from a different source if it is better."""
if new.time - old.time > (
stale_seconds := self._advertisement_tracker.intervals.get(
new.address, FALLBACK_MAXIMUM_STALE_ADVERTISEMENT_SECONDS
new.address,
self._fallback_intervals.get(
new.address, FALLBACK_MAXIMUM_STALE_ADVERTISEMENT_SECONDS
),
)
):
# If the old advertisement is stale, any new advertisement is preferred
@ -779,3 +787,20 @@ class BluetoothManager:
def async_allocate_connection_slot(self, device: BLEDevice) -> bool:
"""Allocate a connection slot."""
return self.slot_manager.allocate_slot(device)
@hass_callback
def async_get_learned_advertising_interval(self, address: str) -> float | None:
"""Get the learned advertising interval for a MAC address."""
return self._advertisement_tracker.intervals.get(address)
@hass_callback
def async_get_fallback_availability_interval(self, address: str) -> float | None:
"""Get the fallback availability timeout for a MAC address."""
return self._fallback_intervals.get(address)
@hass_callback
def async_set_fallback_availability_interval(
self, address: str, interval: float
) -> None:
"""Override the fallback availability timeout for a MAC address."""
self._fallback_intervals[address] = interval
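A simplified sketch of the lookup order this hunk introduces: the interval learned from real advertisements wins, then the per-address fallback set via async_set_fallback_availability_interval, then the default used in the (elided) else branch. Here default_seconds stands in for that constant, and the wobble value is an assumption:

    TRACKER_BUFFERING_WOBBLE_SECONDS = 5.0  # assumed value; the real constant lives in the component

    def resolve_availability_interval(
        learned: dict[str, float],
        fallbacks: dict[str, float],
        address: str,
        default_seconds: float,
    ) -> float:
        # Mirrors the unavailable-tracking loop above: learned interval first,
        # then the per-address fallback, with a small buffer for timing wobble.
        if interval := (learned.get(address) or fallbacks.get(address)):
            return interval + TRACKER_BUFFERING_WOBBLE_SECONDS
        # Neither is known: fall back to the default timeout passed by the caller.
        return default_seconds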

View File

@ -3,7 +3,7 @@
"name": "Bluetooth",
"codeowners": ["@bdraco"],
"config_flow": true,
"dependencies": ["logger", "usb"],
"dependencies": ["usb"],
"documentation": "https://www.home-assistant.io/integrations/bluetooth",
"iot_class": "local_push",
"loggers": [
@ -15,10 +15,10 @@
"quality_scale": "internal",
"requirements": [
"bleak==0.21.1",
"bleak-retry-connector==3.1.3",
"bleak-retry-connector==3.2.1",
"bluetooth-adapters==0.16.1",
"bluetooth-auto-recovery==1.2.3",
"bluetooth-data-tools==1.11.0",
"dbus-fast==1.95.2"
"bluetooth-data-tools==1.12.0",
"dbus-fast==2.11.0"
]
}

View File

@ -85,6 +85,7 @@ class PassiveBluetoothDataUpdateCoordinator(
change: BluetoothChange,
) -> None:
"""Handle a Bluetooth event."""
self._available = True
self.async_update_listeners()

View File

@ -341,7 +341,8 @@ class PassiveBluetoothProcessorCoordinator(
change: BluetoothChange,
) -> None:
"""Handle a Bluetooth event."""
super()._async_handle_bluetooth_event(service_info, change)
was_available = self._available
self._available = True
if self.hass.is_stopping:
return
@ -359,7 +360,7 @@ class PassiveBluetoothProcessorCoordinator(
self.logger.info("Coordinator %s recovered", self.name)
for processor in self._processors:
processor.async_handle_update(update)
processor.async_handle_update(update, was_available)
_PassiveBluetoothDataProcessorT = TypeVar(
@ -516,20 +517,39 @@ class PassiveBluetoothDataProcessor(Generic[_T]):
@callback
def async_update_listeners(
self, data: PassiveBluetoothDataUpdate[_T] | None
self,
data: PassiveBluetoothDataUpdate[_T] | None,
was_available: bool | None = None,
) -> None:
"""Update all registered listeners."""
if was_available is None:
was_available = self.coordinator.available
# Dispatch to listeners without a filter key
for update_callback in self._listeners:
update_callback(data)
if not was_available or data is None:
# When data is None, or was_available is False,
# dispatch to all listeners as it means the device
# is flipping between available and unavailable
for listeners in self._entity_key_listeners.values():
for update_callback in listeners:
update_callback(data)
return
# Dispatch to listeners with a filter key
for listeners in self._entity_key_listeners.values():
for update_callback in listeners:
update_callback(data)
# if the key is in the data
entity_key_listeners = self._entity_key_listeners
for entity_key in data.entity_data:
if maybe_listener := entity_key_listeners.get(entity_key):
for update_callback in maybe_listener:
update_callback(data)
@callback
def async_handle_update(self, update: _T) -> None:
def async_handle_update(
self, update: _T, was_available: bool | None = None
) -> None:
"""Handle a Bluetooth event."""
try:
new_data = self.update_method(update)
@ -554,7 +574,7 @@ class PassiveBluetoothDataProcessor(Generic[_T]):
)
self.data.update(new_data)
self.async_update_listeners(new_data)
self.async_update_listeners(new_data, was_available)
class PassiveBluetoothProcessorEntity(Entity, Generic[_PassiveBluetoothDataProcessorT]):
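The effect of the was_available argument added above, restated as a standalone sketch (names here are illustrative, not from the component): when the device has just become available again, or there is no data, every entity-key listener is notified so entities can refresh their availability; otherwise only listeners whose key appears in the new data are called.

    from collections.abc import Callable

    def dispatch_update(
        entity_key_listeners: dict[str, list[Callable[[object], None]]],
        data: dict[str, object] | None,
        was_available: bool,
    ) -> None:
        if not was_available or data is None:
            # Availability flipped or data cleared: notify every listener
            for callbacks in entity_key_listeners.values():
                for update_callback in callbacks:
                    update_callback(data)
            return
        # Steady state: only notify listeners whose key is present in the update
        for entity_key in data:
            for update_callback in entity_key_listeners.get(entity_key, []):
                update_callback(data)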

View File

@ -329,6 +329,9 @@ class HaScanner(BaseHaScanner):
self.name,
SCANNER_WATCHDOG_TIMEOUT,
)
# Immediately mark the scanner as not scanning
# since the restart task will have to wait for the lock
self.scanning = False
self.hass.async_create_task(self._async_restart_scanner())
async def _async_restart_scanner(self) -> None:

View File

@ -39,6 +39,8 @@ class BasePassiveBluetoothCoordinator(ABC):
self.mode = mode
self._last_unavailable_time = 0.0
self._last_name = address
# Subclasses are responsible for setting _available to True
# when the abstractmethod _async_handle_bluetooth_event is called.
self._available = async_address_present(hass, address, connectable)
@callback
@ -88,23 +90,13 @@ class BasePassiveBluetoothCoordinator(ABC):
"""Return if the device is available."""
return self._available
@callback
def _async_handle_bluetooth_event_internal(
self,
service_info: BluetoothServiceInfoBleak,
change: BluetoothChange,
) -> None:
"""Handle a bluetooth event."""
self._available = True
self._async_handle_bluetooth_event(service_info, change)
@callback
def _async_start(self) -> None:
"""Start the callbacks."""
self._on_stop.append(
async_register_callback(
self.hass,
self._async_handle_bluetooth_event_internal,
self._async_handle_bluetooth_event,
BluetoothCallbackMatcher(
address=self.address, connectable=self.connectable
),
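With _async_handle_bluetooth_event_internal removed above, each subclass now sets _available itself when an advertisement arrives, as the new comment in __init__ spells out. A minimal subclass sketch of that responsibility (the class name and body are hypothetical; the pattern matches the coordinator hunks earlier in this diff):

    class ExamplePassiveCoordinator(BasePassiveBluetoothCoordinator):
        @callback
        def _async_handle_bluetooth_event(
            self,
            service_info: BluetoothServiceInfoBleak,
            change: BluetoothChange,
        ) -> None:
            # The base class no longer flips this flag for us
            self._available = True
            # ... process service_info here ...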

View File

@ -2,7 +2,6 @@
from __future__ import annotations
import asyncio
from collections.abc import Awaitable
from datetime import datetime, timedelta
import logging
from typing import Final
@ -152,7 +151,7 @@ async def async_setup_scanner(
async def perform_bluetooth_update() -> None:
"""Discover Bluetooth devices and update status."""
_LOGGER.debug("Performing Bluetooth devices discovery and update")
tasks: list[Awaitable[None]] = []
tasks: list[asyncio.Task[None]] = []
try:
if track_new:

View File

@ -13,6 +13,7 @@ from homeassistant.components.sensor import (
SensorDeviceClass,
SensorEntity,
SensorEntityDescription,
SensorStateClass,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import LENGTH, PERCENTAGE, VOLUME, UnitOfElectricCurrent
@ -94,6 +95,7 @@ SENSOR_TYPES: dict[str, BMWSensorEntityDescription] = {
key_class="fuel_and_battery",
unit_type=PERCENTAGE,
device_class=SensorDeviceClass.BATTERY,
state_class=SensorStateClass.MEASUREMENT,
),
# --- Specific ---
"mileage": BMWSensorEntityDescription(
@ -102,6 +104,7 @@ SENSOR_TYPES: dict[str, BMWSensorEntityDescription] = {
icon="mdi:speedometer",
unit_type=LENGTH,
value=lambda x, hass: convert_and_round(x, hass.config.units.length, 2),
state_class=SensorStateClass.TOTAL_INCREASING,
),
"remaining_range_total": BMWSensorEntityDescription(
key="remaining_range_total",
@ -110,6 +113,7 @@ SENSOR_TYPES: dict[str, BMWSensorEntityDescription] = {
icon="mdi:map-marker-distance",
unit_type=LENGTH,
value=lambda x, hass: convert_and_round(x, hass.config.units.length, 2),
state_class=SensorStateClass.MEASUREMENT,
),
"remaining_range_electric": BMWSensorEntityDescription(
key="remaining_range_electric",
@ -118,6 +122,7 @@ SENSOR_TYPES: dict[str, BMWSensorEntityDescription] = {
icon="mdi:map-marker-distance",
unit_type=LENGTH,
value=lambda x, hass: convert_and_round(x, hass.config.units.length, 2),
state_class=SensorStateClass.MEASUREMENT,
),
"remaining_range_fuel": BMWSensorEntityDescription(
key="remaining_range_fuel",
@ -126,6 +131,7 @@ SENSOR_TYPES: dict[str, BMWSensorEntityDescription] = {
icon="mdi:map-marker-distance",
unit_type=LENGTH,
value=lambda x, hass: convert_and_round(x, hass.config.units.length, 2),
state_class=SensorStateClass.MEASUREMENT,
),
"remaining_fuel": BMWSensorEntityDescription(
key="remaining_fuel",
@ -134,6 +140,7 @@ SENSOR_TYPES: dict[str, BMWSensorEntityDescription] = {
icon="mdi:gas-station",
unit_type=VOLUME,
value=lambda x, hass: convert_and_round(x, hass.config.units.volume, 2),
state_class=SensorStateClass.MEASUREMENT,
),
"remaining_fuel_percent": BMWSensorEntityDescription(
key="remaining_fuel_percent",
@ -141,6 +148,7 @@ SENSOR_TYPES: dict[str, BMWSensorEntityDescription] = {
key_class="fuel_and_battery",
icon="mdi:gas-station",
unit_type=PERCENTAGE,
state_class=SensorStateClass.MEASUREMENT,
),
}
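For context on the state_class values added above: MEASUREMENT marks readings that can move in either direction, while TOTAL_INCREASING marks monotonically growing totals such as mileage; both let the recorder build long-term statistics. An illustrative pair of descriptions (keys are made up, not from the integration):

    from homeassistant.components.sensor import SensorEntityDescription, SensorStateClass

    EXAMPLE_ODOMETER = SensorEntityDescription(
        key="example_odometer",
        state_class=SensorStateClass.TOTAL_INCREASING,  # value only ever grows
    )
    EXAMPLE_BATTERY = SensorEntityDescription(
        key="example_battery",
        state_class=SensorStateClass.MEASUREMENT,  # value can rise and fall
    )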

View File

@ -17,7 +17,7 @@ from homeassistant.const import (
ATTR_SW_VERSION,
ATTR_VIA_DEVICE,
)
from homeassistant.core import CALLBACK_TYPE, callback
from homeassistant.core import CALLBACK_TYPE, HassJob, callback
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.event import async_call_later
@ -68,6 +68,9 @@ class BondEntity(Entity):
self._attr_assumed_state = self._hub.is_bridge and not self._device.trust_state
self._apply_state()
self._bpup_polling_fallback: CALLBACK_TYPE | None = None
self._async_update_if_bpup_not_alive_job = HassJob(
self._async_update_if_bpup_not_alive
)
@property
def device_info(self) -> DeviceInfo:
@ -185,7 +188,7 @@ class BondEntity(Entity):
self._bpup_polling_fallback = async_call_later(
self.hass,
_BPUP_ALIVE_SCAN_INTERVAL if alive else _FALLBACK_SCAN_INTERVAL,
self._async_update_if_bpup_not_alive,
self._async_update_if_bpup_not_alive_job,
)
async def async_will_remove_from_hass(self) -> None:
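Why the HassJob wrapper above helps, sketched briefly: building the job once in __init__ lets async_call_later reuse the pre-analysed callable every time the BPUP fallback poll is re-armed, instead of re-inspecting the bound method on each reschedule. A minimal illustration of the pattern (entity name and 30-second delay are assumptions):

    from homeassistant.core import HassJob, callback
    from homeassistant.helpers.entity import Entity
    from homeassistant.helpers.event import async_call_later

    class ExamplePollingEntity(Entity):
        def __init__(self) -> None:
            # Built once; reused for every rescheduled fallback poll
            self._poll_job = HassJob(self._async_poll_if_needed)

        @callback
        def _async_poll_if_needed(self, _now) -> None:
            # ... poll the device, then re-arm the fallback timer ...
            async_call_later(self.hass, 30, self._poll_job)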

View File

@ -10,6 +10,9 @@
},
"credentials": {
"data": {
"password": "[%key:common::config_flow::data::password%]"
},
"data_description": {
"password": "Password of the Smart Home Controller"
}
},

Some files were not shown because too many files have changed in this diff.