Merge pull request #38065 from home-assistant/rc

pull/38071/head 0.113.0
Franck Nijhof 2020-07-22 17:45:03 +02:00 committed by GitHub
commit c9380d4972
1230 changed files with 25857 additions and 8412 deletions


@ -214,7 +214,14 @@ omit =
homeassistant/components/emoncms_history/*
homeassistant/components/emulated_hue/upnp.py
homeassistant/components/enigma2/media_player.py
homeassistant/components/enocean/*
homeassistant/components/enocean/__init__.py
homeassistant/components/enocean/binary_sensor.py
homeassistant/components/enocean/const.py
homeassistant/components/enocean/device.py
homeassistant/components/enocean/dongle.py
homeassistant/components/enocean/light.py
homeassistant/components/enocean/sensor.py
homeassistant/components/enocean/switch.py
homeassistant/components/enphase_envoy/sensor.py
homeassistant/components/entur_public_transport/*
homeassistant/components/environment_canada/*
@ -313,6 +320,7 @@ omit =
homeassistant/components/guardian/binary_sensor.py
homeassistant/components/guardian/sensor.py
homeassistant/components/guardian/switch.py
homeassistant/components/guardian/util.py
homeassistant/components/habitica/*
homeassistant/components/hangouts/*
homeassistant/components/hangouts/__init__.py
@ -372,7 +380,6 @@ omit =
homeassistant/components/ihc/*
homeassistant/components/imap/sensor.py
homeassistant/components/imap_email_content/sensor.py
homeassistant/components/influxdb/sensor.py
homeassistant/components/insteon/*
homeassistant/components/incomfort/*
homeassistant/components/intesishome/*
@ -531,6 +538,7 @@ omit =
homeassistant/components/netatmo/climate.py
homeassistant/components/netatmo/const.py
homeassistant/components/netatmo/sensor.py
homeassistant/components/netatmo/webhook.py
homeassistant/components/netdata/sensor.py
homeassistant/components/netgear/device_tracker.py
homeassistant/components/netgear_lte/*
@ -621,9 +629,12 @@ omit =
homeassistant/components/plugwise/climate.py
homeassistant/components/plugwise/sensor.py
homeassistant/components/plugwise/switch.py
homeassistant/components/plum_lightpad/*
homeassistant/components/plum_lightpad/light.py
homeassistant/components/pocketcasts/sensor.py
homeassistant/components/point/*
homeassistant/components/poolsense/__init__.py
homeassistant/components/poolsense/sensor.py
homeassistant/components/poolsense/binary_sensor.py
homeassistant/components/prezzibenzina/sensor.py
homeassistant/components/proliphix/climate.py
homeassistant/components/prometheus/*
@ -731,7 +742,9 @@ omit =
homeassistant/components/smappee/sensor.py
homeassistant/components/smappee/switch.py
homeassistant/components/smarty/*
homeassistant/components/smarthab/*
homeassistant/components/smarthab/__init__.py
homeassistant/components/smarthab/cover.py
homeassistant/components/smarthab/light.py
homeassistant/components/sms/*
homeassistant/components/smtp/notify.py
homeassistant/components/snapcast/*

.github/dependabot.yml

@ -0,0 +1,8 @@
version: 2
updates:
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: daily
time: "06:00"
open-pull-requests-limit: 10

.github/workflows/ci.yaml

@ -0,0 +1,784 @@
name: CI
# yamllint disable-line rule:truthy
on:
push:
branches:
- dev
- rc
- master
pull_request: ~
env:
DEFAULT_PYTHON: 3.7
PRE_COMMIT_HOME: ~/.cache/pre-commit
jobs:
# Separate job to pre-populate the base dependency cache
# This prevents the upcoming jobs from doing the same work individually
prepare-base:
name: Prepare base dependencies
runs-on: ubuntu-latest
steps:
- name: Check out code from GitHub
uses: actions/checkout@v2
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
uses: actions/setup-python@v2
with:
python-version: ${{ env.DEFAULT_PYTHON }}
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache@v2
with:
path: venv
key: >-
${{ runner.os }}-base-venv-${{ steps.python.outputs.python-version
}}-${{ hashFiles('requirements.txt') }}-${{
hashFiles('requirements_test.txt') }}-${{
hashFiles('homeassistant/package_constraints.txt') }}
restore-keys: |
${{ runner.os }}-base-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_test.txt') }}-
${{ runner.os }}-base-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}
${{ runner.os }}-base-venv-${{ steps.python.outputs.python-version }}-
- name: Create Python virtual environment
if: steps.cache-venv.outputs.cache-hit != 'true'
run: |
python -m venv venv
. venv/bin/activate
pip install -U pip setuptools
pip install -r requirements.txt -r requirements_test.txt
# Uninstalling typing as a workaround. Eventually we should make sure
# all our dependencies drop typing.
# Find offending deps with `pipdeptree -r -p typing`
pip uninstall -y typing
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache@v2
with:
path: ${{ env.PRE_COMMIT_HOME }}
key: |
${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
restore-keys: |
${{ runner.os }}-pre-commit-
- name: Install pre-commit dependencies
if: steps.cache-precommit.outputs.cache-hit != 'true'
run: |
. venv/bin/activate
pre-commit install-hooks
lint-bandit:
name: Check bandit
runs-on: ubuntu-latest
needs: prepare-base
steps:
- name: Check out code from GitHub
uses: actions/checkout@v2
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v2
id: python
with:
python-version: ${{ env.DEFAULT_PYTHON }}
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache@v2
with:
path: venv
key: >-
${{ runner.os }}-base-venv-${{ steps.python.outputs.python-version
}}-${{ hashFiles('requirements.txt') }}-${{
hashFiles('requirements_test.txt') }}-${{
hashFiles('homeassistant/package_constraints.txt') }}
- name: Fail job if Python cache restore failed
if: steps.cache-venv.outputs.cache-hit != 'true'
run: |
echo "Failed to restore Python virtual environment from cache"
exit 1
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache@v2
with:
path: ${{ env.PRE_COMMIT_HOME }}
key: |
${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
- name: Fail job if cache restore failed
if: steps.cache-precommit.outputs.cache-hit != 'true'
run: |
echo "Failed to restore pre-commit environment from cache"
exit 1
- name: Run bandit
run: |
. venv/bin/activate
pre-commit run --hook-stage manual bandit --all-files --show-diff-on-failure
lint-black:
name: Check black
runs-on: ubuntu-latest
needs: prepare-base
steps:
- name: Check out code from GitHub
uses: actions/checkout@v2
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v2
id: python
with:
python-version: ${{ env.DEFAULT_PYTHON }}
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache@v2
with:
path: venv
key: >-
${{ runner.os }}-base-venv-${{ steps.python.outputs.python-version
}}-${{ hashFiles('requirements.txt') }}-${{
hashFiles('requirements_test.txt') }}-${{
hashFiles('homeassistant/package_constraints.txt') }}
- name: Fail job if Python cache restore failed
if: steps.cache-venv.outputs.cache-hit != 'true'
run: |
echo "Failed to restore Python virtual environment from cache"
exit 1
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache@v2
with:
path: ${{ env.PRE_COMMIT_HOME }}
key: |
${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
- name: Fail job if cache restore failed
if: steps.cache-precommit.outputs.cache-hit != 'true'
run: |
echo "Failed to restore pre-commit environment from cache"
exit 1
- name: Run black
run: |
. venv/bin/activate
pre-commit run --hook-stage manual black --all-files --show-diff-on-failure
lint-codespell:
name: Check codespell
runs-on: ubuntu-latest
needs: prepare-base
steps:
- name: Check out code from GitHub
uses: actions/checkout@v2
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v2
id: python
with:
python-version: ${{ env.DEFAULT_PYTHON }}
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache@v2
with:
path: venv
key: >-
${{ runner.os }}-base-venv-${{ steps.python.outputs.python-version
}}-${{ hashFiles('requirements.txt') }}-${{
hashFiles('requirements_test.txt') }}-${{
hashFiles('homeassistant/package_constraints.txt') }}
- name: Fail job if Python cache restore failed
if: steps.cache-venv.outputs.cache-hit != 'true'
run: |
echo "Failed to restore Python virtual environment from cache"
exit 1
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache@v2
with:
path: ${{ env.PRE_COMMIT_HOME }}
key: |
${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
- name: Fail job if cache restore failed
if: steps.cache-precommit.outputs.cache-hit != 'true'
run: |
echo "Failed to restore pre-commit environment from cache"
exit 1
- name: Register codespell problem matcher
run: |
echo "::add-matcher::.github/workflows/matchers/codespell.json"
- name: Run codespell
run: |
. venv/bin/activate
pre-commit run --show-diff-on-failure --hook-stage manual codespell --all-files
lint-dockerfile:
name: Check Dockerfile
runs-on: ubuntu-latest
needs: prepare-base
steps:
- name: Check out code from GitHub
uses: actions/checkout@v2
- name: Register hadolint problem matcher
run: |
echo "::add-matcher::.github/workflows/matchers/hadolint.json"
- name: Check Dockerfile
uses: docker://hadolint/hadolint:v1.18.0
with:
args: hadolint Dockerfile
- name: Check Dockerfile.dev
uses: docker://hadolint/hadolint:v1.18.0
with:
args: hadolint Dockerfile.dev
lint-executable-shebangs:
name: Check executables
runs-on: ubuntu-latest
needs: prepare-base
steps:
- name: Check out code from GitHub
uses: actions/checkout@v2
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v2
id: python
with:
python-version: ${{ env.DEFAULT_PYTHON }}
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache@v2
with:
path: venv
key: >-
${{ runner.os }}-base-venv-${{ steps.python.outputs.python-version
}}-${{ hashFiles('requirements.txt') }}-${{
hashFiles('requirements_test.txt') }}-${{
hashFiles('homeassistant/package_constraints.txt') }}
- name: Fail job if Python cache restore failed
if: steps.cache-venv.outputs.cache-hit != 'true'
run: |
echo "Failed to restore Python virtual environment from cache"
exit 1
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache@v2
with:
path: ${{ env.PRE_COMMIT_HOME }}
key: |
${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
- name: Fail job if cache restore failed
if: steps.cache-precommit.outputs.cache-hit != 'true'
run: |
echo "Failed to restore pre-commit environment from cache"
exit 1
- name: Register check executables problem matcher
run: |
echo "::add-matcher::.github/workflows/matchers/check-executables-have-shebangs.json"
- name: Run executables check
run: |
. venv/bin/activate
pre-commit run --hook-stage manual check-executables-have-shebangs --all-files
lint-flake8:
name: Check flake8
runs-on: ubuntu-latest
needs: prepare-base
steps:
- name: Check out code from GitHub
uses: actions/checkout@v2
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v2
id: python
with:
python-version: ${{ env.DEFAULT_PYTHON }}
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache@v2
with:
path: venv
key: >-
${{ runner.os }}-base-venv-${{ steps.python.outputs.python-version
}}-${{ hashFiles('requirements.txt') }}-${{
hashFiles('requirements_test.txt') }}-${{
hashFiles('homeassistant/package_constraints.txt') }}
- name: Fail job if Python cache restore failed
if: steps.cache-venv.outputs.cache-hit != 'true'
run: |
echo "Failed to restore Python virtual environment from cache"
exit 1
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache@v2
with:
path: ${{ env.PRE_COMMIT_HOME }}
key: |
${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
- name: Fail job if cache restore failed
if: steps.cache-precommit.outputs.cache-hit != 'true'
run: |
echo "Failed to restore pre-commit environment from cache"
exit 1
- name: Register flake8 problem matcher
run: |
echo "::add-matcher::.github/workflows/matchers/flake8.json"
- name: Run flake8
run: |
. venv/bin/activate
pre-commit run --hook-stage manual flake8 --all-files
lint-isort:
name: Check isort
runs-on: ubuntu-latest
needs: prepare-base
steps:
- name: Check out code from GitHub
uses: actions/checkout@v2
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v2
id: python
with:
python-version: ${{ env.DEFAULT_PYTHON }}
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache@v2
with:
path: venv
key: >-
${{ runner.os }}-base-venv-${{ steps.python.outputs.python-version
}}-${{ hashFiles('requirements.txt') }}-${{
hashFiles('requirements_test.txt') }}-${{
hashFiles('homeassistant/package_constraints.txt') }}
- name: Fail job if Python cache restore failed
if: steps.cache-venv.outputs.cache-hit != 'true'
run: |
echo "Failed to restore Python virtual environment from cache"
exit 1
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache@v2
with:
path: ${{ env.PRE_COMMIT_HOME }}
key: |
${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
- name: Fail job if cache restore failed
if: steps.cache-precommit.outputs.cache-hit != 'true'
run: |
echo "Failed to restore pre-commit environment from cache"
exit 1
- name: Run isort
run: |
. venv/bin/activate
pre-commit run --hook-stage manual isort --all-files --show-diff-on-failure
lint-json:
name: Check JSON
runs-on: ubuntu-latest
needs: prepare-base
steps:
- name: Check out code from GitHub
uses: actions/checkout@v2
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v2
id: python
with:
python-version: ${{ env.DEFAULT_PYTHON }}
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache@v2
with:
path: venv
key: >-
${{ runner.os }}-base-venv-${{ steps.python.outputs.python-version
}}-${{ hashFiles('requirements.txt') }}-${{
hashFiles('requirements_test.txt') }}-${{
hashFiles('homeassistant/package_constraints.txt') }}
- name: Fail job if Python cache restore failed
if: steps.cache-venv.outputs.cache-hit != 'true'
run: |
echo "Failed to restore Python virtual environment from cache"
exit 1
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache@v2
with:
path: ${{ env.PRE_COMMIT_HOME }}
key: |
${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
- name: Fail job if cache restore failed
if: steps.cache-precommit.outputs.cache-hit != 'true'
run: |
echo "Failed to restore pre-commit environment from cache"
exit 1
- name: Register check-json problem matcher
run: |
echo "::add-matcher::.github/workflows/matchers/check-json.json"
- name: Run check-json
run: |
. venv/bin/activate
pre-commit run --hook-stage manual check-json --all-files
lint-pyupgrade:
name: Check pyupgrade
runs-on: ubuntu-latest
needs: prepare-base
steps:
- name: Check out code from GitHub
uses: actions/checkout@v2
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v2
id: python
with:
python-version: ${{ env.DEFAULT_PYTHON }}
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache@v2
with:
path: venv
key: >-
${{ runner.os }}-base-venv-${{ steps.python.outputs.python-version
}}-${{ hashFiles('requirements.txt') }}-${{
hashFiles('requirements_test.txt') }}-${{
hashFiles('homeassistant/package_constraints.txt') }}
- name: Fail job if Python cache restore failed
if: steps.cache-venv.outputs.cache-hit != 'true'
run: |
echo "Failed to restore Python virtual environment from cache"
exit 1
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache@v2
with:
path: ${{ env.PRE_COMMIT_HOME }}
key: |
${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
- name: Fail job if cache restore failed
if: steps.cache-precommit.outputs.cache-hit != 'true'
run: |
echo "Failed to restore pre-commit environment from cache"
exit 1
- name: Run pyupgrade
run: |
. venv/bin/activate
pre-commit run --hook-stage manual pyupgrade --all-files --show-diff-on-failure
# Disabled until we have the existing issues fixed
# lint-shellcheck:
# name: Check ShellCheck
# runs-on: ubuntu-latest
# needs: prepare-base
# steps:
# - name: Check out code from GitHub
# uses: actions/checkout@v2
# - name: Run ShellCheck
# uses: ludeeus/action-shellcheck@0.3.0
lint-yaml:
name: Check YAML
runs-on: ubuntu-latest
needs: prepare-base
steps:
- name: Check out code from GitHub
uses: actions/checkout@v2
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v2
id: python
with:
python-version: ${{ env.DEFAULT_PYTHON }}
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache@v2
with:
path: venv
key: >-
${{ runner.os }}-base-venv-${{ steps.python.outputs.python-version
}}-${{ hashFiles('requirements.txt') }}-${{
hashFiles('requirements_test.txt') }}-${{
hashFiles('homeassistant/package_constraints.txt') }}
- name: Fail job if Python cache restore failed
if: steps.cache-venv.outputs.cache-hit != 'true'
run: |
echo "Failed to restore Python virtual environment from cache"
exit 1
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache@v2
with:
path: ${{ env.PRE_COMMIT_HOME }}
key: |
${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
- name: Fail job if cache restore failed
if: steps.cache-precommit.outputs.cache-hit != 'true'
run: |
echo "Failed to restore pre-commit environment from cache"
exit 1
- name: Register yamllint problem matcher
run: |
echo "::add-matcher::.github/workflows/matchers/yamllint.json"
- name: Run yamllint
run: |
. venv/bin/activate
pre-commit run --hook-stage manual yamllint --all-files --show-diff-on-failure
hassfest:
name: Check hassfest
runs-on: ubuntu-latest
needs: prepare-base
steps:
- name: Check out code from GitHub
uses: actions/checkout@v2
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v2
id: python
with:
python-version: ${{ env.DEFAULT_PYTHON }}
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache@v2
with:
path: venv
key: >-
${{ runner.os }}-base-venv-${{ steps.python.outputs.python-version
}}-${{ hashFiles('requirements.txt') }}-${{
hashFiles('requirements_test.txt') }}-${{
hashFiles('homeassistant/package_constraints.txt') }}
- name: Fail job if Python cache restore failed
if: steps.cache-venv.outputs.cache-hit != 'true'
run: |
echo "Failed to restore Python virtual environment from cache"
exit 1
- name: Run hassfest
run: |
. venv/bin/activate
python -m script.hassfest --action validate
gen-requirements-all:
name: Check all requirements
runs-on: ubuntu-latest
needs: prepare-base
steps:
- name: Check out code from GitHub
uses: actions/checkout@v2
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v2
id: python
with:
python-version: ${{ env.DEFAULT_PYTHON }}
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache@v2
with:
path: venv
key: >-
${{ runner.os }}-base-venv-${{ steps.python.outputs.python-version
}}-${{ hashFiles('requirements.txt') }}-${{
hashFiles('requirements_test.txt') }}-${{
hashFiles('homeassistant/package_constraints.txt') }}
- name: Fail job if Python cache restore failed
if: steps.cache-venv.outputs.cache-hit != 'true'
run: |
echo "Failed to restore Python virtual environment from cache"
exit 1
- name: Run gen_requirements_all.py
run: |
. venv/bin/activate
python -m script.gen_requirements_all validate
prepare-tests:
name: Prepare tests for Python ${{ matrix.python-version }}
runs-on: ubuntu-latest
strategy:
matrix:
python-version: [3.7, 3.8]
container: homeassistant/ci-azure:${{ matrix.python-version }}
steps:
- name: Check out code from GitHub
uses: actions/checkout@v2
- name:
Restore full Python ${{ matrix.python-version }} virtual environment
id: cache-venv
uses: actions/cache@v2
with:
path: venv
key: >-
${{ runner.os }}-venv-${{ matrix.python-version }}-${{
hashFiles('requirements_test.txt') }}-${{
hashFiles('requirements_all.txt') }}-${{
hashFiles('homeassistant/package_constraints.txt') }}
restore-keys: |
${{ runner.os }}-venv-${{ matrix.python-version }}-${{ hashFiles('requirements_test.txt') }}-${{ hashFiles('requirements_all.txt') }}
${{ runner.os }}-venv-${{ matrix.python-version }}-${{ hashFiles('requirements_test.txt') }}
${{ runner.os }}-venv-${{ matrix.python-version }}-
- name:
Create full Python ${{ matrix.python-version }} virtual environment
if: steps.cache-venv.outputs.cache-hit != 'true'
run: |
python -m venv venv
. venv/bin/activate
pip install -U pip setuptools wheel
pip install -r requirements_all.txt
pip install -r requirements_test.txt
# Uninstalling typing as a workaround. Eventually we should make sure
# all our dependencies drop typing.
# Find offending deps with `pipdeptree -r -p typing`
pip uninstall -y typing
pip install -e .
pylint:
name: Check pylint
runs-on: ubuntu-latest
needs: prepare-tests
strategy:
matrix:
python-version: [3.7]
container: homeassistant/ci-azure:${{ matrix.python-version }}
steps:
- name: Check out code from GitHub
uses: actions/checkout@v2
- name:
Restore full Python ${{ matrix.python-version }} virtual environment
id: cache-venv
uses: actions/cache@v2
with:
path: venv
key: >-
${{ runner.os }}-venv-${{ matrix.python-version }}-${{
hashFiles('requirements_test.txt') }}-${{
hashFiles('requirements_all.txt') }}-${{
hashFiles('homeassistant/package_constraints.txt') }}
- name: Fail job if Python cache restore failed
if: steps.cache-venv.outputs.cache-hit != 'true'
run: |
echo "Failed to restore Python virtual environment from cache"
exit 1
- name: Register pylint problem matcher
run: |
echo "::add-matcher::.github/workflows/matchers/pylint.json"
- name: Run pylint
run: |
. venv/bin/activate
pylint homeassistant
mypy:
name: Check mypy
runs-on: ubuntu-latest
needs: prepare-tests
strategy:
matrix:
python-version: [3.7]
container: homeassistant/ci-azure:${{ matrix.python-version }}
steps:
- name: Check out code from GitHub
uses: actions/checkout@v2
- name:
Restore full Python ${{ matrix.python-version }} virtual environment
id: cache-venv
uses: actions/cache@v2
with:
path: venv
key: >-
${{ runner.os }}-venv-${{ matrix.python-version }}-${{
hashFiles('requirements_test.txt') }}-${{
hashFiles('requirements_all.txt') }}-${{
hashFiles('homeassistant/package_constraints.txt') }}
- name: Fail job if Python cache restore failed
if: steps.cache-venv.outputs.cache-hit != 'true'
run: |
echo "Failed to restore Python virtual environment from cache"
exit 1
- name: Register mypy problem matcher
run: |
echo "::add-matcher::.github/workflows/matchers/mypy.json"
- name: Run mypy
run: |
. venv/bin/activate
mypy homeassistant
pytest:
runs-on: ubuntu-latest
needs: prepare-tests
strategy:
matrix:
group: [1, 2, 3, 4]
python-version: [3.7, 3.8]
name: >-
Run tests Python ${{ matrix.python-version }} (group ${{ matrix.group }})
container: homeassistant/ci-azure:${{ matrix.python-version }}
steps:
- name: Check out code from GitHub
uses: actions/checkout@v2
- name:
Restore full Python ${{ matrix.python-version }} virtual environment
id: cache-venv
uses: actions/cache@v2
with:
path: venv
key: >-
${{ runner.os }}-venv-${{ matrix.python-version }}-${{
hashFiles('requirements_test.txt') }}-${{
hashFiles('requirements_all.txt') }}-${{
hashFiles('homeassistant/package_constraints.txt') }}
- name: Fail job if Python cache restore failed
if: steps.cache-venv.outputs.cache-hit != 'true'
run: |
echo "Failed to restore Python virtual environment from cache"
exit 1
- name: Register Python problem matcher
run: |
echo "::add-matcher::.github/workflows/matchers/python.json"
- name: Install Pytest Annotation plugin
run: |
. venv/bin/activate
# Ideally this should be part of our dependencies
# However, this plugin is fairly new and doesn't run correctly
# in a non-GitHub environment.
pip install pytest-github-actions-annotate-failures
- name: Run pytest
run: |
. venv/bin/activate
pytest \
-qq \
--timeout=9 \
--durations=10 \
-n auto \
--dist=loadfile \
--test-group-count 4 \
--test-group=${{ matrix.group }} \
--cov homeassistant \
-o console_output_style=count \
-p no:sugar \
tests
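# Note on these flags: -n auto and --dist=loadfile come from pytest-xdist,
# while --test-group-count/--test-group are provided by the pytest-test-groups
# plugin and split the collected tests into four deterministic buckets,
# one per group in the job matrix.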
- name: Upload coverage artifact
uses: actions/upload-artifact@2.1.0
with:
name: coverage-${{ matrix.python-version }}-group${{ matrix.group }}
path: .coverage
- name: Check dirty
run: |
./script/check_dirty
coverage:
name: Process test coverage
runs-on: ubuntu-latest
needs: pytest
strategy:
matrix:
python-version: [3.7]
container: homeassistant/ci-azure:${{ matrix.python-version }}
steps:
- name: Check out code from GitHub
uses: actions/checkout@v2
- name:
Restore full Python ${{ matrix.python-version }} virtual environment
id: cache-venv
uses: actions/cache@v2
with:
path: venv
key: >-
${{ runner.os }}-venv-${{ matrix.python-version }}-${{
hashFiles('requirements_test.txt') }}-${{
hashFiles('requirements_all.txt') }}-${{
hashFiles('homeassistant/package_constraints.txt') }}
- name: Fail job if Python cache restore failed
if: steps.cache-venv.outputs.cache-hit != 'true'
run: |
echo "Failed to restore Python virtual environment from cache"
exit 1
- name: Download all coverage artifacts
uses: actions/download-artifact@v2
- name: Combine coverage results
run: |
. venv/bin/activate
coverage combine coverage*/.coverage*
coverage report --fail-under=94
coverage xml
- name: Upload coverage to Codecov
uses: codecov/codecov-action@v1.0.10


@ -0,0 +1,14 @@
{
"problemMatcher": [
{
"owner": "check-executables-have-shebangs",
"pattern": [
{
"regexp": "^(.+):\\s(.+)$",
"file": 1,
"message": 2
}
]
}
]
}


@ -0,0 +1,16 @@
{
"problemMatcher": [
{
"owner": "check-json",
"pattern": [
{
"regexp": "^(.+):\\s(.+\\sline\\s(\\d+)\\scolumn\\s(\\d+).+)$",
"file": 1,
"message": 2,
"line": 3,
"column": 4
}
]
}
]
}


@ -0,0 +1,16 @@
{
"problemMatcher": [
{
"owner": "codespell",
"severity": "warning",
"pattern": [
{
"regexp": "^(.+):(\\d+):\\s(.+)$",
"file": 1,
"line": 2,
"message": 3
}
]
}
]
}

.github/workflows/matchers/flake8.json

@ -0,0 +1,30 @@
{
"problemMatcher": [
{
"owner": "flake8-error",
"severity": "error",
"pattern": [
{
"regexp": "^(.*):(\\d+):(\\d+):\\s([EF]\\d{3}\\s.*)$",
"file": 1,
"line": 2,
"column": 3,
"message": 4
}
]
},
{
"owner": "flake8-warning",
"severity": "warning",
"pattern": [
{
"regexp": "^(.*):(\\d+):(\\d+):\\s([CDNW]\\d{3}\\s.*)$",
"file": 1,
"line": 2,
"column": 3,
"message": 4
}
]
}
]
}
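For illustration, a hypothetical flake8 line such as

homeassistant/core.py:128:1: E302 expected 2 blank lines, got 1

is captured by the flake8-error pattern above: group 1 is the file, group 2 the line, group 3 the column, and group 4 the "E302 ..." message, which GitHub then surfaces as an inline annotation in the pull request.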


@ -0,0 +1,16 @@
{
"problemMatcher": [
{
"owner": "hadolint",
"pattern": [
{
"regexp": "^(.+):(\\d+)\\s+((DL\\d{4}).+)$",
"file": 1,
"line": 2,
"message": 3,
"code": 4
}
]
}
]
}

.github/workflows/matchers/mypy.json

@ -0,0 +1,16 @@
{
"problemMatcher": [
{
"owner": "mypy",
"pattern": [
{
"regexp": "^(.+):(\\d+):\\s(error|warning):\\s(.+)$",
"file": 1,
"line": 2,
"severity": 3,
"message": 4
}
]
}
]
}

.github/workflows/matchers/pylint.json

@ -0,0 +1,32 @@
{
"problemMatcher": [
{
"owner": "pylint-error",
"severity": "error",
"pattern": [
{
"regexp": "^(.+):(\\d+):(\\d+):\\s(([EF]\\d{4}):\\s.+)$",
"file": 1,
"line": 2,
"column": 3,
"message": 4,
"code": 5
}
]
},
{
"owner": "pylint-warning",
"severity": "warning",
"pattern": [
{
"regexp": "^(.+):(\\d+):(\\d+):\\s(([CRW]\\d{4}):\\s.+)$",
"file": 1,
"line": 2,
"column": 3,
"message": 4,
"code": 5
}
]
}
]
}

.github/workflows/matchers/python.json

@ -0,0 +1,18 @@
{
"problemMatcher": [
{
"owner": "python",
"pattern": [
{
"regexp": "^\\s*File\\s\\\"(.*)\\\",\\sline\\s(\\d+),\\sin\\s(.*)$",
"file": 1,
"line": 2
},
{
"regexp": "^\\s*raise\\s(.*)\\(\\'(.*)\\'\\)$",
"message": 2
}
]
}
]
}


@ -0,0 +1,22 @@
{
"problemMatcher": [
{
"owner": "yamllint",
"pattern": [
{
"regexp": "^(.*\\.ya?ml)$",
"file": 1
},
{
"regexp": "^\\s{2}(\\d+):(\\d+)\\s+(error|warning)\\s+(.*?)\\s+\\((.*)\\)$",
"line": 1,
"column": 2,
"severity": 3,
"message": 4,
"code": 5,
"loop": true
}
]
}
]
}


@ -22,7 +22,7 @@ repos:
- --quiet-level=2
exclude_types: [csv, json]
- repo: https://gitlab.com/pycqa/flake8
rev: 3.8.1
rev: 3.8.3
hooks:
- id: flake8
additional_dependencies:


@ -1,4 +1,3 @@
sudo: false
dist: bionic
addons:
apt:
@ -14,22 +13,30 @@ addons:
sources:
- sourceline: ppa:savoury1/ffmpeg4
matrix:
python:
- "3.7.1"
- "3.8"
env:
- TOX_ARGS="-- --test-group-count 4 --test-group 1"
- TOX_ARGS="-- --test-group-count 4 --test-group 2"
- TOX_ARGS="-- --test-group-count 4 --test-group 3"
- TOX_ARGS="-- --test-group-count 4 --test-group 4"
jobs:
fast_finish: true
include:
- python: "3.7.0"
- python: "3.7.1"
env: TOXENV=lint
- python: "3.7.0"
- python: "3.7.1"
env: TOXENV=pylint PYLINT_ARGS=--jobs=0 TRAVIS_WAIT=30
- python: "3.7.0"
- python: "3.7.1"
env: TOXENV=typing
- python: "3.7.0"
env: TOXENV=py37
cache:
pip: true
directories:
- $HOME/.cache/pre-commit
install: pip install -U tox
install: pip install -U tox tox-travis
language: python
script: ${TRAVIS_WAIT:+travis_wait $TRAVIS_WAIT} tox --develop
script: ${TRAVIS_WAIT:+travis_wait $TRAVIS_WAIT} tox --develop ${TOX_ARGS-}

.vscode/tasks.json

@ -76,7 +76,7 @@
{
"label": "Install all Requirements",
"type": "shell",
"command": "pip3 install -r requirements_all.txt -c homeassistant/package_constraints.txt",
"command": "pip3 install -r requirements_all.txt",
"group": {
"kind": "build",
"isDefault": true
@ -90,7 +90,7 @@
{
"label": "Install all Test Requirements",
"type": "shell",
"command": "pip3 install -r requirements_test_all.txt -c homeassistant/package_constraints.txt",
"command": "pip3 install -r requirements_test_all.txt",
"group": {
"kind": "build",
"isDefault": true


@ -23,7 +23,6 @@ homeassistant/components/alarmdecoder/* @ajschmidt8
homeassistant/components/alexa/* @home-assistant/cloud @ochlocracy
homeassistant/components/almond/* @gcampax @balloob
homeassistant/components/alpha_vantage/* @fabaff
homeassistant/components/amazon_polly/* @robbiet480
homeassistant/components/ambiclimate/* @danielhiversen
homeassistant/components/ambient_station/* @bachya
homeassistant/components/amcrest/* @pnbruckner
@ -47,7 +46,7 @@ homeassistant/components/automation/* @home-assistant/core
homeassistant/components/avea/* @pattyland
homeassistant/components/avri/* @timvancann
homeassistant/components/awair/* @ahayworth @danielsjf
homeassistant/components/aws/* @awarecan @robbiet480
homeassistant/components/aws/* @awarecan
homeassistant/components/axis/* @Kane610
homeassistant/components/azure_event_hub/* @eavanvalkenburg
homeassistant/components/azure_service_bus/* @hfurubotten
@ -59,6 +58,7 @@ homeassistant/components/blink/* @fronzbot
homeassistant/components/bmp280/* @belidzs
homeassistant/components/bmw_connected_drive/* @gerard33 @rikroe
homeassistant/components/bom/* @maddenp
homeassistant/components/bond/* @prystupa
homeassistant/components/braviatv/* @bieniu
homeassistant/components/broadlink/* @danielhiversen @felipediel
homeassistant/components/brother/* @bieniu
@ -94,6 +94,7 @@ homeassistant/components/denonavr/* @scarface-4711 @starkillerOG
homeassistant/components/derivative/* @afaucogney
homeassistant/components/device_automation/* @home-assistant/core
homeassistant/components/devolo_home_control/* @2Fake @Shutgun
homeassistant/components/dexcom/* @gagebenne
homeassistant/components/digital_ocean/* @fabaff
homeassistant/components/directv/* @ctalkington
homeassistant/components/discogs/* @thibmaek
@ -127,7 +128,6 @@ homeassistant/components/ezviz/* @baqs
homeassistant/components/fastdotcom/* @rohankapoorcom
homeassistant/components/file/* @fabaff
homeassistant/components/filter/* @dgomes
homeassistant/components/fitbit/* @robbiet480
homeassistant/components/fixer/* @fabaff
homeassistant/components/flick_electric/* @ZephireNZ
homeassistant/components/flock/* @fabaff
@ -136,7 +136,6 @@ homeassistant/components/flunearyou/* @bachya
homeassistant/components/forked_daapd/* @uvjustin
homeassistant/components/fortios/* @kimfrellsen
homeassistant/components/foscam/* @skgsergio
homeassistant/components/foursquare/* @robbiet480
homeassistant/components/freebox/* @snoof85 @Quentame
homeassistant/components/fronius/* @nielstron
homeassistant/components/frontend/* @home-assistant/frontend
@ -149,18 +148,15 @@ homeassistant/components/geonetnz_volcano/* @exxamalte
homeassistant/components/gios/* @bieniu
homeassistant/components/gitter/* @fabaff
homeassistant/components/glances/* @fabaff @engrbm87
homeassistant/components/gntp/* @robbiet480
homeassistant/components/gogogate2/* @vangorra
homeassistant/components/google_assistant/* @home-assistant/cloud
homeassistant/components/google_cloud/* @lufton
homeassistant/components/google_translate/* @awarecan
homeassistant/components/google_travel_time/* @robbiet480
homeassistant/components/gpsd/* @fabaff
homeassistant/components/greeneye_monitor/* @jkeljo
homeassistant/components/griddy/* @bdraco
homeassistant/components/group/* @home-assistant/core
homeassistant/components/growatt_server/* @indykoning
homeassistant/components/gtfs/* @robbiet480
homeassistant/components/guardian/* @bachya
homeassistant/components/harmony/* @ehendrix23 @bramkragten @bdraco
homeassistant/components/hassio/* @home-assistant/hass-io
@ -179,11 +175,10 @@ homeassistant/components/homekit_controller/* @Jc2k
homeassistant/components/homematic/* @pvizeli @danielperna84
homeassistant/components/homematicip_cloud/* @SukramJ
homeassistant/components/honeywell/* @zxdavb
homeassistant/components/html5/* @robbiet480
homeassistant/components/http/* @home-assistant/core
homeassistant/components/huawei_lte/* @scop @fphammerle
homeassistant/components/huawei_router/* @abmantis
homeassistant/components/hue/* @balloob
homeassistant/components/hue/* @balloob @frenck
homeassistant/components/humidifier/* @home-assistant/core @Shulyaka
homeassistant/components/hunterdouglas_powerview/* @bdraco
homeassistant/components/hvv_departures/* @vigonotion
@ -193,7 +188,7 @@ homeassistant/components/iaqualink/* @flz
homeassistant/components/icloud/* @Quentame
homeassistant/components/ign_sismologia/* @exxamalte
homeassistant/components/incomfort/* @zxdavb
homeassistant/components/influxdb/* @fabaff
homeassistant/components/influxdb/* @fabaff @mdegat01
homeassistant/components/input_boolean/* @home-assistant/core
homeassistant/components/input_datetime/* @home-assistant/core
homeassistant/components/input_number/* @home-assistant/core
@ -318,6 +313,7 @@ homeassistant/components/plex/* @jjlawren
homeassistant/components/plugwise/* @CoMPaTech @bouwew
homeassistant/components/plum_lightpad/* @ColinHarrington @prystupa
homeassistant/components/point/* @fredrike
homeassistant/components/poolsense/* @haemishkyd
homeassistant/components/powerwall/* @bdraco @jrester
homeassistant/components/prometheus/* @knyar
homeassistant/components/proxmoxve/* @k4ds3 @jhollowe
@ -338,7 +334,7 @@ homeassistant/components/rainforest_eagle/* @gtdiehl @jcalbert
homeassistant/components/rainmachine/* @bachya
homeassistant/components/random/* @fabaff
homeassistant/components/repetier/* @MTrab
homeassistant/components/rfxtrx/* @danielhiversen
homeassistant/components/rfxtrx/* @danielhiversen @elupus
homeassistant/components/ring/* @balloob
homeassistant/components/rmvtransport/* @cgtobi
homeassistant/components/roku/* @ctalkington
@ -431,8 +427,6 @@ homeassistant/components/transmission/* @engrbm87 @JPHutchins
homeassistant/components/tts/* @pvizeli
homeassistant/components/tuya/* @ollo69
homeassistant/components/twentemilieu/* @frenck
homeassistant/components/twilio_call/* @robbiet480
homeassistant/components/twilio_sms/* @robbiet480
homeassistant/components/ubee/* @mzdrale
homeassistant/components/unifi/* @Kane610
homeassistant/components/unifiled/* @florisvdk
@ -478,7 +472,7 @@ homeassistant/components/yeelightsunflower/* @lindsaymarkward
homeassistant/components/yessssms/* @flowolf
homeassistant/components/yi/* @bachya
homeassistant/components/yr/* @danielhiversen
homeassistant/components/zeroconf/* @robbiet480 @Kane610
homeassistant/components/zeroconf/* @Kane610
homeassistant/components/zerproc/* @emlove
homeassistant/components/zha/* @dmulcahey @adminiuga
homeassistant/components/zone/* @home-assistant/core


@ -10,9 +10,10 @@ WORKDIR /usr/src
COPY . homeassistant/
RUN \
pip3 install --no-cache-dir --no-index --only-binary=:all: --find-links "${WHEELS_LINKS}" \
-r homeassistant/requirements_all.txt -c homeassistant/homeassistant/package_constraints.txt \
-r homeassistant/requirements_all.txt \
&& pip3 uninstall -y typing \
&& pip3 install --no-cache-dir --no-index --only-binary=:all: --find-links "${WHEELS_LINKS}" \
-e ./homeassistant \
-e ./homeassistant \
&& python3 -m compileall homeassistant/homeassistant
# Home Assistant S6-Overlay


@ -23,9 +23,10 @@ RUN git clone --depth 1 https://github.com/home-assistant/hass-release \
WORKDIR /workspaces
# Install Python dependencies from requirements
COPY requirements_test.txt requirements_test_pre_commit.txt homeassistant/package_constraints.txt ./
RUN pip3 install -r requirements_test.txt -c package_constraints.txt \
&& rm -f requirements_test.txt package_constraints.txt requirements_test_pre_commit.txt
COPY requirements_test.txt requirements_test_pre_commit.txt ./
COPY homeassistant/package_constraints.txt homeassistant/package_constraints.txt
RUN pip3 install -r requirements_test.txt \
&& rm -rf requirements_test.txt requirements_test_pre_commit.txt homeassistant/
# Set the default shell to bash instead of sh
ENV SHELL /bin/bash


@ -44,7 +44,7 @@ stages:
python -m venv venv
. venv/bin/activate
pip install -r requirements_test.txt -c homeassistant/package_constraints.txt
pip install -r requirements_test.txt
pre-commit install-hooks
- script: |
. venv/bin/activate
@ -117,7 +117,7 @@ stages:
python -m venv venv
. venv/bin/activate
pip install -r requirements_test.txt -c homeassistant/package_constraints.txt
pip install -r requirements_test.txt
pre-commit install-hooks
- script: |
. venv/bin/activate
@ -165,7 +165,7 @@ stages:
. venv/bin/activate
pip install -U pip setuptools pytest-azurepipelines pytest-xdist -c homeassistant/package_constraints.txt
pip install -r requirements_test_all.txt -c homeassistant/package_constraints.txt
pip install -r requirements_test_all.txt
# This is a temporary workaround. Eventually we should make sure our 4 dependencies drop typing.
# Find offending deps with `pipdeptree -r -p typing`
pip uninstall -y typing
@ -209,8 +209,8 @@ stages:
. venv/bin/activate
pip install -U pip setuptools wheel
pip install -r requirements_all.txt -c homeassistant/package_constraints.txt
pip install -r requirements_test.txt -c homeassistant/package_constraints.txt
pip install -r requirements_all.txt
pip install -r requirements_test.txt
# This is a temporary workaround. Eventually we should make sure our 4 dependencies drop typing.
# Find offending deps with `pipdeptree -r -p typing`
pip uninstall -y typing
@ -234,7 +234,7 @@ stages:
python -m venv venv
. venv/bin/activate
pip install -e . -r requirements_test.txt -c homeassistant/package_constraints.txt
pip install -e . -r requirements_test.txt
pre-commit install-hooks
- script: |
. venv/bin/activate


@ -17,7 +17,7 @@ schedules:
- dev
variables:
- name: versionWheels
value: '1.10.1-3.7-alpine3.11'
value: '1.13.0-3.8-alpine3.12'
resources:
repositories:
- repository: azure


@ -1,11 +1,11 @@
{
"image": "homeassistant/{arch}-homeassistant",
"build_from": {
"aarch64": "homeassistant/aarch64-homeassistant-base:7.2.0",
"armhf": "homeassistant/armhf-homeassistant-base:7.2.0",
"armv7": "homeassistant/armv7-homeassistant-base:7.2.0",
"amd64": "homeassistant/amd64-homeassistant-base:7.2.0",
"i386": "homeassistant/i386-homeassistant-base:7.2.0"
"aarch64": "homeassistant/aarch64-homeassistant-base:8.0.0",
"armhf": "homeassistant/armhf-homeassistant-base:8.0.0",
"armv7": "homeassistant/armv7-homeassistant-base:8.0.0",
"amd64": "homeassistant/amd64-homeassistant-base:8.0.0",
"i386": "homeassistant/i386-homeassistant-base:8.0.0"
},
"labels": {
"io.hass.type": "core"


@ -1,6 +1,5 @@
"""Start Home Assistant."""
import argparse
import asyncio
import os
import platform
import subprocess
@ -8,32 +7,9 @@ import sys
import threading
from typing import List
import yarl
from homeassistant.const import REQUIRED_PYTHON_VER, RESTART_EXIT_CODE, __version__
def set_loop() -> None:
"""Attempt to use different loop."""
# pylint: disable=import-outside-toplevel
from asyncio.events import BaseDefaultEventLoopPolicy
if sys.platform == "win32":
if hasattr(asyncio, "WindowsProactorEventLoopPolicy"):
# pylint: disable=no-member
policy = asyncio.WindowsProactorEventLoopPolicy()
else:
class ProactorPolicy(BaseDefaultEventLoopPolicy):
"""Event loop policy to create proactor loops."""
_loop_factory = asyncio.ProactorEventLoop
policy = ProactorPolicy()
asyncio.set_event_loop_policy(policy)
def validate_python() -> None:
"""Validate that the right Python version is running."""
if sys.version_info[:3] < REQUIRED_PYTHON_VER:
@ -240,39 +216,6 @@ def cmdline() -> List[str]:
return [arg for arg in sys.argv if arg != "--daemon"]
async def setup_and_run_hass(config_dir: str, args: argparse.Namespace) -> int:
"""Set up Home Assistant and run."""
# pylint: disable=import-outside-toplevel
from homeassistant import bootstrap
hass = await bootstrap.async_setup_hass(
config_dir=config_dir,
verbose=args.verbose,
log_rotate_days=args.log_rotate_days,
log_file=args.log_file,
log_no_color=args.log_no_color,
skip_pip=args.skip_pip,
safe_mode=args.safe_mode,
)
if hass is None:
return 1
if args.open_ui:
import webbrowser # pylint: disable=import-outside-toplevel
if hass.config.api is not None:
scheme = "https" if hass.config.api.use_ssl else "http"
url = str(
yarl.URL.build(
scheme=scheme, host="127.0.0.1", port=hass.config.api.port
)
)
hass.add_job(webbrowser.open, url)
return await hass.async_run()
def try_to_restart() -> None:
"""Attempt to clean up state and start a new Home Assistant instance."""
# Things should be mostly shut down already at this point, now just try
@ -319,8 +262,6 @@ def main() -> int:
"""Start Home Assistant."""
validate_python()
set_loop()
# Run a simple daemon runner process on Windows to handle restarts
if os.name == "nt" and "--runner" not in sys.argv:
nt_args = cmdline() + ["--runner"]
@ -353,7 +294,22 @@ def main() -> int:
if args.pid_file:
write_pid(args.pid_file)
exit_code = asyncio.run(setup_and_run_hass(config_dir, args), debug=args.debug)
# pylint: disable=import-outside-toplevel
from homeassistant import runner
runtime_conf = runner.RuntimeConfig(
config_dir=config_dir,
verbose=args.verbose,
log_rotate_days=args.log_rotate_days,
log_file=args.log_file,
log_no_color=args.log_no_color,
skip_pip=args.skip_pip,
safe_mode=args.safe_mode,
debug=args.debug,
open_ui=args.open_ui,
)
exit_code = runner.run(runtime_conf)
if exit_code == RESTART_EXIT_CODE and not args.runner:
try_to_restart()
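For context: setup_and_run_hass is gone, and its arguments now travel in a runner.RuntimeConfig object consumed by runner.run(). The call above fixes the field names; a minimal sketch of such a settings container, assuming an attrs-based class with illustrative defaults (not a copy of the actual homeassistant/runner.py):

from typing import Optional

import attr


@attr.s(slots=True)
class RuntimeConfig:
    """Hold the runtime settings passed from the command line to bootstrap."""

    config_dir: str = attr.ib()

    skip_pip: bool = attr.ib(default=False)
    safe_mode: bool = attr.ib(default=False)

    verbose: bool = attr.ib(default=False)

    log_rotate_days: Optional[int] = attr.ib(default=None)
    log_file: Optional[str] = attr.ib(default=None)
    log_no_color: bool = attr.ib(default=False)

    debug: bool = attr.ib(default=False)
    open_ui: bool = attr.ib(default=False)

The bootstrap changes further down read these same fields back from runtime_config.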


@ -77,10 +77,10 @@ def _verify_otp(secret: str, otp: str, count: int) -> bool:
class NotifySetting:
"""Store notify setting for one user."""
secret = attr.ib(type=str, factory=_generate_secret) # not persistent
counter = attr.ib(type=int, factory=_generate_random) # not persistent
notify_service = attr.ib(type=Optional[str], default=None)
target = attr.ib(type=Optional[str], default=None)
secret: str = attr.ib(factory=_generate_secret) # not persistent
counter: int = attr.ib(factory=_generate_random) # not persistent
notify_service: Optional[str] = attr.ib(default=None)
target: Optional[str] = attr.ib(default=None)
_UsersDict = Dict[str, NotifySetting]


@ -20,39 +20,35 @@ TOKEN_TYPE_LONG_LIVED_ACCESS_TOKEN = "long_lived_access_token"
class Group:
"""A group."""
name = attr.ib(type=Optional[str])
policy = attr.ib(type=perm_mdl.PolicyType)
id = attr.ib(type=str, factory=lambda: uuid.uuid4().hex)
system_generated = attr.ib(type=bool, default=False)
name: Optional[str] = attr.ib()
policy: perm_mdl.PolicyType = attr.ib()
id: str = attr.ib(factory=lambda: uuid.uuid4().hex)
system_generated: bool = attr.ib(default=False)
@attr.s(slots=True)
class User:
"""A user."""
name = attr.ib(type=Optional[str])
perm_lookup = attr.ib(type=perm_mdl.PermissionLookup, eq=False, order=False)
id = attr.ib(type=str, factory=lambda: uuid.uuid4().hex)
is_owner = attr.ib(type=bool, default=False)
is_active = attr.ib(type=bool, default=False)
system_generated = attr.ib(type=bool, default=False)
name: Optional[str] = attr.ib()
perm_lookup: perm_mdl.PermissionLookup = attr.ib(eq=False, order=False)
id: str = attr.ib(factory=lambda: uuid.uuid4().hex)
is_owner: bool = attr.ib(default=False)
is_active: bool = attr.ib(default=False)
system_generated: bool = attr.ib(default=False)
groups = attr.ib(type=List[Group], factory=list, eq=False, order=False)
groups: List[Group] = attr.ib(factory=list, eq=False, order=False)
# List of credentials of a user.
credentials = attr.ib(type=List["Credentials"], factory=list, eq=False, order=False)
credentials: List["Credentials"] = attr.ib(factory=list, eq=False, order=False)
# Tokens associated with a user.
refresh_tokens = attr.ib(
type=Dict[str, "RefreshToken"], factory=dict, eq=False, order=False
refresh_tokens: Dict[str, "RefreshToken"] = attr.ib(
factory=dict, eq=False, order=False
)
_permissions = attr.ib(
type=Optional[perm_mdl.PolicyPermissions],
init=False,
eq=False,
order=False,
default=None,
_permissions: Optional[perm_mdl.PolicyPermissions] = attr.ib(
init=False, eq=False, order=False, default=None,
)
@property
@ -88,39 +84,38 @@ class User:
class RefreshToken:
"""RefreshToken for a user to grant new access tokens."""
user = attr.ib(type=User)
client_id = attr.ib(type=Optional[str])
access_token_expiration = attr.ib(type=timedelta)
client_name = attr.ib(type=Optional[str], default=None)
client_icon = attr.ib(type=Optional[str], default=None)
token_type = attr.ib(
type=str,
user: User = attr.ib()
client_id: Optional[str] = attr.ib()
access_token_expiration: timedelta = attr.ib()
client_name: Optional[str] = attr.ib(default=None)
client_icon: Optional[str] = attr.ib(default=None)
token_type: str = attr.ib(
default=TOKEN_TYPE_NORMAL,
validator=attr.validators.in_(
(TOKEN_TYPE_NORMAL, TOKEN_TYPE_SYSTEM, TOKEN_TYPE_LONG_LIVED_ACCESS_TOKEN)
),
)
id = attr.ib(type=str, factory=lambda: uuid.uuid4().hex)
created_at = attr.ib(type=datetime, factory=dt_util.utcnow)
token = attr.ib(type=str, factory=lambda: secrets.token_hex(64))
jwt_key = attr.ib(type=str, factory=lambda: secrets.token_hex(64))
id: str = attr.ib(factory=lambda: uuid.uuid4().hex)
created_at: datetime = attr.ib(factory=dt_util.utcnow)
token: str = attr.ib(factory=lambda: secrets.token_hex(64))
jwt_key: str = attr.ib(factory=lambda: secrets.token_hex(64))
last_used_at = attr.ib(type=Optional[datetime], default=None)
last_used_ip = attr.ib(type=Optional[str], default=None)
last_used_at: Optional[datetime] = attr.ib(default=None)
last_used_ip: Optional[str] = attr.ib(default=None)
@attr.s(slots=True)
class Credentials:
"""Credentials for a user on an auth provider."""
auth_provider_type = attr.ib(type=str)
auth_provider_id = attr.ib(type=Optional[str])
auth_provider_type: str = attr.ib()
auth_provider_id: Optional[str] = attr.ib()
# Allow the auth provider to store data to represent their auth.
data = attr.ib(type=dict)
data: dict = attr.ib()
id = attr.ib(type=str, factory=lambda: uuid.uuid4().hex)
is_new = attr.ib(type=bool, default=True)
id: str = attr.ib(factory=lambda: uuid.uuid4().hex)
is_new: bool = attr.ib(default=True)
class UserMeta(NamedTuple):


@ -13,5 +13,5 @@ if TYPE_CHECKING:
class PermissionLookup:
"""Class to hold data for permission lookups."""
entity_registry = attr.ib(type="ent_reg.EntityRegistry")
device_registry = attr.ib(type="dev_reg.DeviceRegistry")
entity_registry: "ent_reg.EntityRegistry" = attr.ib()
device_registry: "dev_reg.DeviceRegistry" = attr.ib()
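The auth model changes above all follow one pattern: the attr.ib(type=...) keyword is dropped in favor of a PEP 526 variable annotation, which attrs picks up automatically. A minimal illustrative sketch (hypothetical class, not part of this change):

from typing import Optional

import attr


@attr.s(slots=True)
class Example:
    """Both attributes behave identically; only the spelling differs."""

    # Old spelling: the type is passed as a keyword argument to attr.ib().
    old_style = attr.ib(type=Optional[str], default=None)
    # New spelling: the type is a variable annotation read by attrs.
    new_style: Optional[str] = attr.ib(default=None)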


@ -75,7 +75,7 @@ class CommandLineAuthProvider(AuthProvider):
if process.returncode != 0:
_LOGGER.error(
"User %r failed to authenticate, command exited with code %d.",
"User %r failed to authenticate, command exited with code %d",
username,
process.returncode,
)


@ -190,7 +190,7 @@ class TrustedNetworksLoginFlow(LoginFlow):
).async_validate_access(self._ip_address)
except InvalidAuthError:
return self.async_abort(reason="not_whitelisted")
return self.async_abort(reason="not_allowed")
if user_input is not None:
return await self.async_finish(user_input)


@ -7,10 +7,11 @@ import logging.handlers
import os
import sys
from time import monotonic
from typing import Any, Dict, Optional, Set
from typing import TYPE_CHECKING, Any, Dict, Optional, Set
from async_timeout import timeout
import voluptuous as vol
import yarl
from homeassistant import config as conf_util, config_entries, core, loader
from homeassistant.components import http
@ -31,6 +32,9 @@ from homeassistant.util.logging import async_activate_log_queue_handler
from homeassistant.util.package import async_get_user_site, is_virtual_env
from homeassistant.util.yaml import clear_secret_cache
if TYPE_CHECKING:
from .runner import RuntimeConfig
_LOGGER = logging.getLogger(__name__)
ERROR_LOG_FILENAME = "home-assistant.log"
@ -66,23 +70,22 @@ STAGE_1_INTEGRATIONS = {
async def async_setup_hass(
*,
config_dir: str,
verbose: bool,
log_rotate_days: int,
log_file: str,
log_no_color: bool,
skip_pip: bool,
safe_mode: bool,
runtime_config: "RuntimeConfig",
) -> Optional[core.HomeAssistant]:
"""Set up Home Assistant."""
hass = core.HomeAssistant()
hass.config.config_dir = config_dir
hass.config.config_dir = runtime_config.config_dir
async_enable_logging(hass, verbose, log_rotate_days, log_file, log_no_color)
async_enable_logging(
hass,
runtime_config.verbose,
runtime_config.log_rotate_days,
runtime_config.log_file,
runtime_config.log_no_color,
)
hass.config.skip_pip = skip_pip
if skip_pip:
hass.config.skip_pip = runtime_config.skip_pip
if runtime_config.skip_pip:
_LOGGER.warning(
"Skipping pip installation of required modules. This may cause issues"
)
@ -91,10 +94,11 @@ async def async_setup_hass(
_LOGGER.error("Error getting configuration path")
return None
_LOGGER.info("Config directory: %s", config_dir)
_LOGGER.info("Config directory: %s", runtime_config.config_dir)
config_dict = None
basic_setup_success = False
safe_mode = runtime_config.safe_mode
if not safe_mode:
await hass.async_add_executor_job(conf_util.process_ha_config_upgrade, hass)
@ -107,7 +111,7 @@ async def async_setup_hass(
)
else:
if not is_virtual_env():
await async_mount_local_lib_path(config_dir)
await async_mount_local_lib_path(runtime_config.config_dir)
basic_setup_success = (
await async_from_config_dict(config_dict, hass) is not None
@ -137,6 +141,7 @@ async def async_setup_hass(
safe_mode = True
old_config = hass.config
hass = core.HomeAssistant()
hass.config.skip_pip = old_config.skip_pip
hass.config.internal_url = old_config.internal_url
@ -153,9 +158,32 @@ async def async_setup_hass(
{"safe_mode": {}, "http": http_conf}, hass,
)
if runtime_config.open_ui:
hass.add_job(open_hass_ui, hass)
return hass
def open_hass_ui(hass: core.HomeAssistant) -> None:
"""Open the UI."""
import webbrowser # pylint: disable=import-outside-toplevel
if hass.config.api is None or "frontend" not in hass.config.components:
_LOGGER.warning("Cannot launch the UI because frontend not loaded")
return
scheme = "https" if hass.config.api.use_ssl else "http"
url = str(
yarl.URL.build(scheme=scheme, host="127.0.0.1", port=hass.config.api.port)
)
if not webbrowser.open(url):
_LOGGER.warning(
"Unable to open the Home Assistant UI in a browser. Open it yourself at %s",
url,
)
async def async_from_config_dict(
config: ConfigType, hass: core.HomeAssistant
) -> Optional[core.HomeAssistant]:


@ -37,7 +37,7 @@ def is_on(hass, entity_id=None):
continue
if not hasattr(component, "is_on"):
_LOGGER.warning("Integration %s has no is_on method.", domain)
_LOGGER.warning("Integration %s has no is_on method", domain)
continue
if component.is_on(ent_id):


@ -61,7 +61,7 @@ class AcmedaCover(AcmedaBase, CoverEntity):
None is unknown, 0 is closed, 100 is fully open.
"""
position = None
if self.roller.type == 7 or self.roller.type == 10:
if self.roller.type in [7, 10]:
position = 100 - self.roller.closed_percent
return position
@ -86,37 +86,36 @@ class AcmedaCover(AcmedaBase, CoverEntity):
@property
def is_closed(self):
"""Return if the cover is closed."""
is_closed = self.roller.closed_percent == 100
return is_closed
return self.roller.closed_percent == 100
async def close_cover(self, **kwargs):
async def async_close_cover(self, **kwargs):
"""Close the roller."""
await self.roller.move_down()
async def open_cover(self, **kwargs):
async def async_open_cover(self, **kwargs):
"""Open the roller."""
await self.roller.move_up()
async def stop_cover(self, **kwargs):
async def async_stop_cover(self, **kwargs):
"""Stop the roller."""
await self.roller.move_stop()
async def set_cover_position(self, **kwargs):
async def async_set_cover_position(self, **kwargs):
"""Move the roller shutter to a specific position."""
await self.roller.move_to(100 - kwargs[ATTR_POSITION])
async def close_cover_tilt(self, **kwargs):
async def async_close_cover_tilt(self, **kwargs):
"""Close the roller."""
await self.roller.move_down()
async def open_cover_tilt(self, **kwargs):
async def async_open_cover_tilt(self, **kwargs):
"""Open the roller."""
await self.roller.move_up()
async def stop_cover_tilt(self, **kwargs):
async def async_stop_cover_tilt(self, **kwargs):
"""Stop the roller."""
await self.roller.move_stop()
async def set_cover_tilt(self, **kwargs):
async def async_set_cover_tilt(self, **kwargs):
"""Tilt the roller shutter to a specific position."""
await self.roller.move_to(100 - kwargs[ATTR_POSITION])


@ -183,7 +183,7 @@ class AdGuardHomeEntity(Entity):
except AdGuardHomeError:
if self._available:
_LOGGER.debug(
"An error occurred while updating AdGuard Home sensor.",
"An error occurred while updating AdGuard Home sensor",
exc_info=True,
)
self._available = False


@ -73,7 +73,7 @@ class AdGuardHomeSwitch(AdGuardHomeDeviceEntity, SwitchEntity):
try:
await self._adguard_turn_off()
except AdGuardHomeError:
_LOGGER.error("An error occurred while turning off AdGuard Home switch.")
_LOGGER.error("An error occurred while turning off AdGuard Home switch")
self._available = False
async def _adguard_turn_off(self) -> None:
@ -85,7 +85,7 @@ class AdGuardHomeSwitch(AdGuardHomeDeviceEntity, SwitchEntity):
try:
await self._adguard_turn_on()
except AdGuardHomeError:
_LOGGER.error("An error occurred while turning on AdGuard Home switch.")
_LOGGER.error("An error occurred while turning on AdGuard Home switch")
self._available = False
async def _adguard_turn_on(self) -> None:


@ -4,6 +4,14 @@
"hassio_confirm": {
"description": "Chcete nakonfigurovat slu\u017ebu Home Assistant pro p\u0159ipojen\u00ed k AddGuard pomoc\u00ed hass.io {addon}?",
"title": "AdGuard prost\u0159ednictv\u00edm dopl\u0148ku Hass.io"
},
"user": {
"data": {
"host": "Hostitel",
"password": "Heslo",
"port": "Port",
"username": "U\u017eivatelsk\u00e9 jm\u00e9no"
}
}
}
}


@ -1,10 +1,17 @@
{
"config": {
"error": {
"connection_error": "Falha na liga\u00e7\u00e3o"
},
"step": {
"hassio_confirm": {
"title": "AdGuard Home via Hass.io add-on"
},
"user": {
"data": {
"host": "Servidor",
"password": "Palavra-passe",
"port": "Porta",
"username": "Nome de Utilizador"
}
}


@ -2,6 +2,6 @@
"domain": "ads",
"name": "ADS",
"documentation": "https://www.home-assistant.io/integrations/ads",
"requirements": ["pyads==3.0.7"],
"requirements": ["pyads==3.1.3"],
"codeowners": []
}


@ -74,8 +74,8 @@ class AgentCamera(MjpegCamera):
device_info = {
CONF_NAME: device.name,
CONF_MJPEG_URL: f"{self.server_url}{device.mjpeg_image_url}&size=640x480",
CONF_STILL_IMAGE_URL: f"{self.server_url}{device.still_image_url}&size=640x480",
CONF_MJPEG_URL: f"{self.server_url}{device.mjpeg_image_url}&size={device.mjpegStreamWidth}x{device.mjpegStreamHeight}",
CONF_STILL_IMAGE_URL: f"{self.server_url}{device.still_image_url}&size={device.mjpegStreamWidth}x{device.mjpegStreamHeight}",
}
self.device = device
self._removed = False


@ -2,7 +2,7 @@
"domain": "agent_dvr",
"name": "Agent DVR",
"documentation": "https://www.home-assistant.io/integrations/agent_dvr/",
"requirements": ["agent-py==0.0.20"],
"requirements": ["agent-py==0.0.23"],
"config_flow": true,
"codeowners": ["@ispysoftware"]
}

View File

@ -0,0 +1,12 @@
{
"config": {
"step": {
"user": {
"data": {
"host": "Hostitel",
"port": "Port"
}
}
}
}
}

View File

@ -10,7 +10,7 @@
"step": {
"user": {
"data": {
"host": "H\u00f4te",
"host": "Nom d'h\u00f4te ou adresse IP",
"port": "Port"
},
"title": "Configurer l'agent DVR"

View File

@ -0,0 +1,11 @@
{
"config": {
"step": {
"user": {
"data": {
"api_key": "Kl\u00ed\u010d API"
}
}
}
}
}

View File

@ -0,0 +1,16 @@
{
"config": {
"step": {
"geography": {
"data": {
"api_key": "Kl\u00ed\u010d API"
}
},
"node_pro": {
"data": {
"password": "Heslo"
}
}
}
}
}

View File

@ -0,0 +1,11 @@
{
"config": {
"step": {
"node_pro": {
"data": {
"password": "Palavra-passe"
}
}
}
}
}

View File

@ -8,7 +8,7 @@ alarm_disarm:
example: "alarm_control_panel.downstairs"
code:
description: An optional code to disarm the alarm control panel with.
example: 1234
example: "1234"
alarm_arm_custom_bypass:
description: Send arm custom bypass command.
@ -18,7 +18,7 @@ alarm_arm_custom_bypass:
example: "alarm_control_panel.downstairs"
code:
description: An optional code to arm custom bypass the alarm control panel with.
example: 1234
example: "1234"
alarm_arm_home:
description: Send the alarm the command for arm home.
@ -28,7 +28,7 @@ alarm_arm_home:
example: "alarm_control_panel.downstairs"
code:
description: An optional code to arm home the alarm control panel with.
example: 1234
example: "1234"
alarm_arm_away:
description: Send the alarm the command for arm away.
@ -38,7 +38,7 @@ alarm_arm_away:
example: "alarm_control_panel.downstairs"
code:
description: An optional code to arm away the alarm control panel with.
example: 1234
example: "1234"
alarm_arm_night:
description: Send the alarm the command for arm night.
@ -48,7 +48,7 @@ alarm_arm_night:
example: "alarm_control_panel.downstairs"
code:
description: An optional code to arm night the alarm control panel with.
example: 1234
example: "1234"
alarm_trigger:
description: Send the alarm the command for trigger.
@ -58,4 +58,4 @@ alarm_trigger:
example: "alarm_control_panel.downstairs"
code:
description: An optional code to trigger the alarm control panel with.
example: 1234
example: "1234"

View File

@ -18,7 +18,7 @@
"armed_away": "{entity_name} armada ausente",
"armed_home": "{entity_name} armada en casa",
"armed_night": "{entity_name} armada noche",
"disarmed": "{entity_name} desarmado",
"disarmed": "{entity_name} desarmada",
"triggered": "{entity_name} activado"
}
},

View File

@ -162,7 +162,7 @@ def setup(hass, config):
if not restart:
return
restart = False
_LOGGER.warning("AlarmDecoder unexpectedly lost connection.")
_LOGGER.warning("AlarmDecoder unexpectedly lost connection")
hass.add_job(open_connection)
def handle_message(sender, message):

View File

@ -199,8 +199,8 @@ class Alert(ToggleEntity):
self._send_done_message = False
self.entity_id = f"{DOMAIN}.{entity_id}"
event.async_track_state_change(
hass, watched_entity_id, self.watched_entity_change
event.async_track_state_change_event(
hass, [watched_entity_id], self.watched_entity_change
)
@property
@ -222,9 +222,12 @@ class Alert(ToggleEntity):
return STATE_ON
return STATE_IDLE
async def watched_entity_change(self, entity, from_state, to_state):
async def watched_entity_change(self, ev):
"""Determine if the alert should start or stop."""
_LOGGER.debug("Watched entity (%s) has changed", entity)
to_state = ev.data.get("new_state")
if to_state is None:
return
_LOGGER.debug("Watched entity (%s) has changed", ev.data.get("entity_id"))
if to_state.state == self._alert_state and not self._firing:
await self.begin_alerting()
if to_state.state != self._alert_state and self._firing:
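
A minimal sketch of the event-based pattern this hunk migrates to, assuming a hass instance is in scope and a hypothetical watched entity id; as in watched_entity_change above, the changed entity and its old/new states are read from event.data:

from homeassistant.core import callback
from homeassistant.helpers.event import async_track_state_change_event

@callback
def _state_listener(event):
    """Handle a state change delivered as an event."""
    entity_id = event.data.get("entity_id")
    new_state = event.data.get("new_state")
    if new_state is None:
        return  # entity was removed, nothing to evaluate
    # ... entity_id and new_state.state drive the alert logic here ...

# The helper takes a list of entity ids and returns an unsubscribe callable.
unsub = async_track_state_change_event(hass, ["binary_sensor.example"], _state_listener)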

View File

@ -70,11 +70,11 @@ class Auth:
await self.async_load_preferences()
if self.is_token_valid():
_LOGGER.debug("Token still valid, using it.")
_LOGGER.debug("Token still valid, using it")
return self._prefs[STORAGE_ACCESS_TOKEN]
if self._prefs[STORAGE_REFRESH_TOKEN] is None:
_LOGGER.debug("Token invalid and no refresh token available.")
_LOGGER.debug("Token invalid and no refresh token available")
return None
lwa_params = {
@ -84,7 +84,7 @@ class Auth:
CONF_CLIENT_SECRET: self.client_secret,
}
_LOGGER.debug("Calling LWA to refresh the access token.")
_LOGGER.debug("Calling LWA to refresh the access token")
return await self._async_request_new_token(lwa_params)
@callback
@ -113,14 +113,14 @@ class Auth:
)
except (asyncio.TimeoutError, aiohttp.ClientError):
_LOGGER.error("Timeout calling LWA to get auth token.")
_LOGGER.error("Timeout calling LWA to get auth token")
return None
_LOGGER.debug("LWA response header: %s", response.headers)
_LOGGER.debug("LWA response status: %s", response.status)
if response.status != HTTP_OK:
_LOGGER.error("Error calling LWA to get auth token.")
_LOGGER.error("Error calling LWA to get auth token")
return None
response_json = await response.json()

View File

@ -590,9 +590,8 @@ class ScriptCapabilities(AlexaEntity):
def interfaces(self):
"""Yield the supported interfaces."""
can_cancel = bool(self.entity.attributes.get("can_cancel"))
return [
AlexaSceneController(self.entity, supports_deactivation=can_cancel),
AlexaSceneController(self.entity, supports_deactivation=True),
Alexa(self.hass),
]

View File

@ -101,7 +101,7 @@ async def async_send_changereport_message(
)
except (asyncio.TimeoutError, aiohttp.ClientError):
_LOGGER.error("Timeout sending report to Alexa.")
_LOGGER.error("Timeout sending report to Alexa")
return
response_text = await response.text()
@ -233,7 +233,7 @@ async def async_send_doorbell_event_message(hass, config, alexa_entity):
)
except (asyncio.TimeoutError, aiohttp.ClientError):
_LOGGER.error("Timeout sending report to Alexa.")
_LOGGER.error("Timeout sending report to Alexa")
return
response_text = await response.text()

View File

@ -3,5 +3,5 @@
"name": "Amazon Polly",
"documentation": "https://www.home-assistant.io/integrations/amazon_polly",
"requirements": ["boto3==1.9.252"],
"codeowners": ["@robbiet480"]
"codeowners": []
}

View File

@ -0,0 +1,11 @@
{
"config": {
"step": {
"user": {
"data": {
"api_key": "Kl\u00ed\u010d API"
}
}
}
}
}

View File

@ -33,7 +33,7 @@ from homeassistant.helpers.dispatcher import async_dispatcher_send, dispatcher_s
from homeassistant.helpers.event import track_time_interval
from homeassistant.helpers.service import async_extract_entity_ids
from .binary_sensor import BINARY_SENSORS
from .binary_sensor import BINARY_POLLED_SENSORS, BINARY_SENSORS, check_binary_sensors
from .camera import CAMERA_SERVICES, STREAM_SOURCE_LIST
from .const import (
CAMERAS,
@ -98,7 +98,7 @@ AMCREST_SCHEMA = vol.Schema(
vol.Optional(CONF_FFMPEG_ARGUMENTS, default=DEFAULT_ARGUMENTS): cv.string,
vol.Optional(CONF_SCAN_INTERVAL, default=SCAN_INTERVAL): cv.time_period,
vol.Optional(CONF_BINARY_SENSORS): vol.All(
cv.ensure_list, [vol.In(BINARY_SENSORS)], vol.Unique()
cv.ensure_list, [vol.In(BINARY_SENSORS)], vol.Unique(), check_binary_sensors
),
vol.Optional(CONF_SENSORS): vol.All(
cv.ensure_list, [vol.In(SENSORS)], vol.Unique()
@ -271,7 +271,7 @@ def setup(hass, config):
event_codes = [
BINARY_SENSORS[sensor_type][SENSOR_EVENT_CODE]
for sensor_type in binary_sensors
if BINARY_SENSORS[sensor_type][SENSOR_EVENT_CODE] is not None
if sensor_type not in BINARY_POLLED_SENSORS
]
if event_codes:
_start_event_monitor(hass, name, api, event_codes)

View File

@ -3,15 +3,18 @@ from datetime import timedelta
import logging
from amcrest import AmcrestError
import voluptuous as vol
from homeassistant.components.binary_sensor import (
DEVICE_CLASS_CONNECTIVITY,
DEVICE_CLASS_MOTION,
DEVICE_CLASS_SOUND,
BinarySensorEntity,
)
from homeassistant.const import CONF_BINARY_SENSORS, CONF_NAME
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.util import Throttle
from .const import (
BINARY_SENSOR_SCAN_INTERVAL_SECS,
@ -28,25 +31,48 @@ from .helpers import log_update_error, service_signal
_LOGGER = logging.getLogger(__name__)
SCAN_INTERVAL = timedelta(seconds=BINARY_SENSOR_SCAN_INTERVAL_SECS)
_ONLINE_SCAN_INTERVAL = timedelta(seconds=60 - BINARY_SENSOR_SCAN_INTERVAL_SECS)
BINARY_SENSOR_AUDIO_DETECTED = "audio_detected"
BINARY_SENSOR_AUDIO_DETECTED_POLLED = "audio_detected_polled"
BINARY_SENSOR_MOTION_DETECTED = "motion_detected"
BINARY_SENSOR_MOTION_DETECTED_POLLED = "motion_detected_polled"
BINARY_SENSOR_ONLINE = "online"
BINARY_POLLED_SENSORS = [
BINARY_SENSOR_AUDIO_DETECTED_POLLED,
BINARY_SENSOR_MOTION_DETECTED_POLLED,
BINARY_SENSOR_ONLINE,
]
_AUDIO_DETECTED_PARAMS = ("Audio Detected", DEVICE_CLASS_SOUND, "AudioMutation")
_MOTION_DETECTED_PARAMS = ("Motion Detected", DEVICE_CLASS_MOTION, "VideoMotion")
BINARY_SENSORS = {
BINARY_SENSOR_MOTION_DETECTED: (
"Motion Detected",
DEVICE_CLASS_MOTION,
"VideoMotion",
),
BINARY_SENSOR_AUDIO_DETECTED: _AUDIO_DETECTED_PARAMS,
BINARY_SENSOR_AUDIO_DETECTED_POLLED: _AUDIO_DETECTED_PARAMS,
BINARY_SENSOR_MOTION_DETECTED: _MOTION_DETECTED_PARAMS,
BINARY_SENSOR_MOTION_DETECTED_POLLED: _MOTION_DETECTED_PARAMS,
BINARY_SENSOR_ONLINE: ("Online", DEVICE_CLASS_CONNECTIVITY, None),
}
BINARY_SENSORS = {
k: dict(zip((SENSOR_NAME, SENSOR_DEVICE_CLASS, SENSOR_EVENT_CODE), v))
for k, v in BINARY_SENSORS.items()
}
_EXCLUSIVE_OPTIONS = [
{BINARY_SENSOR_MOTION_DETECTED, BINARY_SENSOR_MOTION_DETECTED_POLLED},
]
_UPDATE_MSG = "Updating %s binary sensor"
def check_binary_sensors(value):
"""Validate binary sensor configurations."""
for exclusive_options in _EXCLUSIVE_OPTIONS:
if len(set(value) & exclusive_options) > 1:
raise vol.Invalid(
f"must contain at most one of {', '.join(exclusive_options)}."
)
return value
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up a binary sensor for an Amcrest IP Camera."""
if discovery_info is None:
@ -80,7 +106,7 @@ class AmcrestBinarySensor(BinarySensorEntity):
@property
def should_poll(self):
"""Return True if entity has to be polled for state."""
return self._sensor_type == BINARY_SENSOR_ONLINE
return self._sensor_type in BINARY_POLLED_SENSORS
@property
def name(self):
@ -109,6 +135,7 @@ class AmcrestBinarySensor(BinarySensorEntity):
else:
self._update_others()
@Throttle(_ONLINE_SCAN_INTERVAL)
def _update_online(self):
if not (self._api.available or self.is_on):
return
@ -137,6 +164,11 @@ class AmcrestBinarySensor(BinarySensorEntity):
async def async_on_demand_update(self):
"""Update state."""
if self._sensor_type == BINARY_SENSOR_ONLINE:
_LOGGER.debug(_UPDATE_MSG, self._name)
self._state = self._api.available
self.async_write_ha_state()
return
self.async_schedule_update_ha_state(True)
@callback
@ -155,7 +187,7 @@ class AmcrestBinarySensor(BinarySensorEntity):
self.async_on_demand_update,
)
)
if self._event_code:
if self._event_code and self._sensor_type not in BINARY_POLLED_SENSORS:
self._unsub_dispatcher.append(
async_dispatcher_connect(
self.hass,
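
A usage sketch of the check_binary_sensors validator defined above (imported from homeassistant.components.amcrest.binary_sensor): the event-driven and polled variants of the same sensor are mutually exclusive in the binary_sensors option, while unrelated keys combine freely:

import voluptuous as vol
from homeassistant.components.amcrest.binary_sensor import check_binary_sensors

check_binary_sensors(["motion_detected", "online"])  # valid: returns the list unchanged
try:
    check_binary_sensors(["motion_detected", "motion_detected_polled"])
except vol.Invalid as err:
    print(err)  # must contain at most one of ...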

View File

@ -4,7 +4,7 @@ DATA_AMCREST = DOMAIN
CAMERAS = "cameras"
DEVICES = "devices"
BINARY_SENSOR_SCAN_INTERVAL_SECS = 60
BINARY_SENSOR_SCAN_INTERVAL_SECS = 5
CAMERA_WEB_SESSION_TIMEOUT = 10
COMM_RETRIES = 1
COMM_TIMEOUT = 6.05

View File

@ -3,8 +3,8 @@
"name": "Android TV",
"documentation": "https://www.home-assistant.io/integrations/androidtv",
"requirements": [
"adb-shell==0.1.3",
"androidtv==0.0.43",
"adb-shell[async]==0.2.0",
"androidtv[async]==0.0.45",
"pure-python-adb==0.2.2.dev0"
],
"codeowners": ["@JeffLIrion"]

View File

@ -5,15 +5,18 @@ import logging
import os
from adb_shell.auth.keygen import keygen
from adb_shell.auth.sign_pythonrsa import PythonRSASigner
from adb_shell.exceptions import (
AdbTimeoutError,
InvalidChecksumError,
InvalidCommandError,
InvalidResponseError,
TcpTimeoutException,
)
from androidtv import ha_state_detection_rules_validator, setup
from androidtv import ha_state_detection_rules_validator
from androidtv.constants import APPS, KEYS
from androidtv.exceptions import LockNotAcquiredException
from androidtv.setup_async import setup
import voluptuous as vol
from homeassistant.components.media_player import PLATFORM_SCHEMA, MediaPlayerEntity
@ -44,7 +47,7 @@ from homeassistant.const import (
STATE_STANDBY,
)
from homeassistant.exceptions import PlatformNotReady
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers import config_validation as cv, entity_platform
from homeassistant.helpers.storage import STORAGE_DIR
ANDROIDTV_DOMAIN = "androidtv"
@ -103,6 +106,7 @@ DEVICE_CLASSES = [DEFAULT_DEVICE_CLASS, DEVICE_ANDROIDTV, DEVICE_FIRETV]
SERVICE_ADB_COMMAND = "adb_command"
SERVICE_DOWNLOAD = "download"
SERVICE_LEARN_SENDEVENT = "learn_sendevent"
SERVICE_UPLOAD = "upload"
SERVICE_ADB_COMMAND_SCHEMA = vol.Schema(
@ -161,7 +165,30 @@ ANDROIDTV_STATES = {
}
def setup_platform(hass, config, add_entities, discovery_info=None):
def setup_androidtv(hass, config):
"""Generate an ADB key (if needed) and load it."""
adbkey = config.get(CONF_ADBKEY, hass.config.path(STORAGE_DIR, "androidtv_adbkey"))
if CONF_ADB_SERVER_IP not in config:
# Use "adb_shell" (Python ADB implementation)
if not os.path.isfile(adbkey):
# Generate ADB key files
keygen(adbkey)
# Load the ADB key
with open(adbkey) as priv_key:
priv = priv_key.read()
signer = PythonRSASigner("", priv)
adb_log = f"using Python ADB implementation with adbkey='{adbkey}'"
else:
# Use "pure-python-adb" (communicate with ADB server)
signer = None
adb_log = f"using ADB server at {config[CONF_ADB_SERVER_IP]}:{config[CONF_ADB_SERVER_PORT]}"
return adbkey, signer, adb_log
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the Android TV / Fire TV platform."""
hass.data.setdefault(ANDROIDTV_DOMAIN, {})
@ -171,51 +198,21 @@ def setup_platform(hass, config, add_entities, discovery_info=None):
_LOGGER.warning("Platform already setup on %s, skipping", address)
return
if CONF_ADB_SERVER_IP not in config:
# Use "adb_shell" (Python ADB implementation)
if CONF_ADBKEY not in config:
# Generate ADB key files (if they don't exist)
adbkey = hass.config.path(STORAGE_DIR, "androidtv_adbkey")
if not os.path.isfile(adbkey):
keygen(adbkey)
adbkey, signer, adb_log = await hass.async_add_executor_job(
setup_androidtv, hass, config
)
adb_log = f"using Python ADB implementation with adbkey='{adbkey}'"
aftv = setup(
config[CONF_HOST],
config[CONF_PORT],
adbkey,
device_class=config[CONF_DEVICE_CLASS],
state_detection_rules=config[CONF_STATE_DETECTION_RULES],
auth_timeout_s=10.0,
)
else:
adb_log = (
f"using Python ADB implementation with adbkey='{config[CONF_ADBKEY]}'"
)
aftv = setup(
config[CONF_HOST],
config[CONF_PORT],
config[CONF_ADBKEY],
device_class=config[CONF_DEVICE_CLASS],
state_detection_rules=config[CONF_STATE_DETECTION_RULES],
auth_timeout_s=10.0,
)
else:
# Use "pure-python-adb" (communicate with ADB server)
adb_log = f"using ADB server at {config[CONF_ADB_SERVER_IP]}:{config[CONF_ADB_SERVER_PORT]}"
aftv = setup(
config[CONF_HOST],
config[CONF_PORT],
adb_server_ip=config[CONF_ADB_SERVER_IP],
adb_server_port=config[CONF_ADB_SERVER_PORT],
device_class=config[CONF_DEVICE_CLASS],
state_detection_rules=config[CONF_STATE_DETECTION_RULES],
)
aftv = await setup(
config[CONF_HOST],
config[CONF_PORT],
adbkey,
config.get(CONF_ADB_SERVER_IP, ""),
config[CONF_ADB_SERVER_PORT],
config[CONF_STATE_DETECTION_RULES],
config[CONF_DEVICE_CLASS],
10.0,
signer,
)
if not aftv.available:
# Determine the name that will be used for the device in the log
@ -251,14 +248,16 @@ def setup_platform(hass, config, add_entities, discovery_info=None):
device = FireTVDevice(*device_args)
device_name = config.get(CONF_NAME, "Fire TV")
add_entities([device])
async_add_entities([device])
_LOGGER.debug("Setup %s at %s %s", device_name, address, adb_log)
hass.data[ANDROIDTV_DOMAIN][address] = device
if hass.services.has_service(ANDROIDTV_DOMAIN, SERVICE_ADB_COMMAND):
return
def service_adb_command(service):
platform = entity_platform.current_platform.get()
async def service_adb_command(service):
"""Dispatch service calls to target entities."""
cmd = service.data[ATTR_COMMAND]
entity_id = service.data[ATTR_ENTITY_ID]
@ -269,7 +268,7 @@ def setup_platform(hass, config, add_entities, discovery_info=None):
]
for target_device in target_devices:
output = target_device.adb_command(cmd)
output = await target_device.adb_command(cmd)
# log the output, if there is any
if output:
@ -280,14 +279,18 @@ def setup_platform(hass, config, add_entities, discovery_info=None):
output,
)
hass.services.register(
hass.services.async_register(
ANDROIDTV_DOMAIN,
SERVICE_ADB_COMMAND,
service_adb_command,
schema=SERVICE_ADB_COMMAND_SCHEMA,
)
def service_download(service):
platform.async_register_entity_service(
SERVICE_LEARN_SENDEVENT, {}, "learn_sendevent"
)
async def service_download(service):
"""Download a file from your Android TV / Fire TV device to your Home Assistant instance."""
local_path = service.data[ATTR_LOCAL_PATH]
if not hass.config.is_allowed_path(local_path):
@ -302,16 +305,16 @@ def setup_platform(hass, config, add_entities, discovery_info=None):
if dev.entity_id in entity_id
][0]
target_device.adb_pull(local_path, device_path)
await target_device.adb_pull(local_path, device_path)
hass.services.register(
hass.services.async_register(
ANDROIDTV_DOMAIN,
SERVICE_DOWNLOAD,
service_download,
schema=SERVICE_DOWNLOAD_SCHEMA,
)
def service_upload(service):
async def service_upload(service):
"""Upload a file from your Home Assistant instance to an Android TV / Fire TV device."""
local_path = service.data[ATTR_LOCAL_PATH]
if not hass.config.is_allowed_path(local_path):
@ -327,9 +330,9 @@ def setup_platform(hass, config, add_entities, discovery_info=None):
]
for target_device in target_devices:
target_device.adb_push(local_path, device_path)
await target_device.adb_push(local_path, device_path)
hass.services.register(
hass.services.async_register(
ANDROIDTV_DOMAIN, SERVICE_UPLOAD, service_upload, schema=SERVICE_UPLOAD_SCHEMA
)
@ -345,13 +348,13 @@ def adb_decorator(override_available=False):
"""Wrap the provided ADB method and catch exceptions."""
@functools.wraps(func)
def _adb_exception_catcher(self, *args, **kwargs):
async def _adb_exception_catcher(self, *args, **kwargs):
"""Call an ADB-related method and catch exceptions."""
if not self.available and not override_available:
return None
try:
return func(self, *args, **kwargs)
return await func(self, *args, **kwargs)
except LockNotAcquiredException:
# If the ADB lock could not be acquired, skip this command
_LOGGER.info(
@ -364,7 +367,7 @@ def adb_decorator(override_available=False):
"establishing attempt in the next update. Error: %s",
err,
)
self.aftv.adb_close()
await self.aftv.adb_close()
self._available = False # pylint: disable=protected-access
return None
@ -411,6 +414,7 @@ class ADBDevice(MediaPlayerEntity):
if not self.aftv.adb_server_ip:
# Using "adb_shell" (Python ADB implementation)
self.exceptions = (
AdbTimeoutError,
AttributeError,
BrokenPipeError,
ConnectionResetError,
@ -487,64 +491,60 @@ class ADBDevice(MediaPlayerEntity):
"""Return the device unique id."""
return self._unique_id
@adb_decorator()
async def async_get_media_image(self):
"""Fetch current playing image."""
if not self._screencap or self.state in [STATE_OFF, None] or not self.available:
return None, None
media_data = await self.hass.async_add_executor_job(self.get_raw_media_data)
media_data = await self.aftv.adb_screencap()
if media_data:
return media_data, "image/png"
return None, None
@adb_decorator()
def get_raw_media_data(self):
"""Raw image data."""
return self.aftv.adb_screencap()
@adb_decorator()
def media_play(self):
async def async_media_play(self):
"""Send play command."""
self.aftv.media_play()
await self.aftv.media_play()
@adb_decorator()
def media_pause(self):
async def async_media_pause(self):
"""Send pause command."""
self.aftv.media_pause()
await self.aftv.media_pause()
@adb_decorator()
def media_play_pause(self):
async def async_media_play_pause(self):
"""Send play/pause command."""
self.aftv.media_play_pause()
await self.aftv.media_play_pause()
@adb_decorator()
def turn_on(self):
async def async_turn_on(self):
"""Turn on the device."""
if self.turn_on_command:
self.aftv.adb_shell(self.turn_on_command)
await self.aftv.adb_shell(self.turn_on_command)
else:
self.aftv.turn_on()
await self.aftv.turn_on()
@adb_decorator()
def turn_off(self):
async def async_turn_off(self):
"""Turn off the device."""
if self.turn_off_command:
self.aftv.adb_shell(self.turn_off_command)
await self.aftv.adb_shell(self.turn_off_command)
else:
self.aftv.turn_off()
await self.aftv.turn_off()
@adb_decorator()
def media_previous_track(self):
async def async_media_previous_track(self):
"""Send previous track command (results in rewind)."""
self.aftv.media_previous_track()
await self.aftv.media_previous_track()
@adb_decorator()
def media_next_track(self):
async def async_media_next_track(self):
"""Send next track command (results in fast-forward)."""
self.aftv.media_next_track()
await self.aftv.media_next_track()
@adb_decorator()
def select_source(self, source):
async def async_select_source(self, source):
"""Select input source.
If the source starts with a '!', then it will close the app instead of
@ -552,50 +552,58 @@ class ADBDevice(MediaPlayerEntity):
"""
if isinstance(source, str):
if not source.startswith("!"):
self.aftv.launch_app(self._app_name_to_id.get(source, source))
await self.aftv.launch_app(self._app_name_to_id.get(source, source))
else:
source_ = source[1:].lstrip()
self.aftv.stop_app(self._app_name_to_id.get(source_, source_))
await self.aftv.stop_app(self._app_name_to_id.get(source_, source_))
@adb_decorator()
def adb_command(self, cmd):
async def adb_command(self, cmd):
"""Send an ADB command to an Android TV / Fire TV device."""
key = self._keys.get(cmd)
if key:
self.aftv.adb_shell(f"input keyevent {key}")
self._adb_response = None
self.schedule_update_ha_state()
await self.aftv.adb_shell(f"input keyevent {key}")
return
if cmd == "GET_PROPERTIES":
self._adb_response = str(self.aftv.get_properties_dict())
self.schedule_update_ha_state()
self._adb_response = str(await self.aftv.get_properties_dict())
self.async_write_ha_state()
return self._adb_response
try:
response = self.aftv.adb_shell(cmd)
response = await self.aftv.adb_shell(cmd)
except UnicodeDecodeError:
self._adb_response = None
self.schedule_update_ha_state()
return
if isinstance(response, str) and response.strip():
self._adb_response = response.strip()
else:
self._adb_response = None
self.async_write_ha_state()
self.schedule_update_ha_state()
return self._adb_response
@adb_decorator()
def adb_pull(self, local_path, device_path):
"""Download a file from your Android TV / Fire TV device to your Home Assistant instance."""
self.aftv.adb_pull(local_path, device_path)
async def learn_sendevent(self):
"""Translate a key press on a remote to ADB 'sendevent' commands."""
output = await self.aftv.learn_sendevent()
if output:
self._adb_response = output
self.async_write_ha_state()
msg = f"Output from service '{SERVICE_LEARN_SENDEVENT}' from {self.entity_id}: '{output}'"
self.hass.components.persistent_notification.async_create(
msg, title="Android TV",
)
_LOGGER.info("%s", msg)
@adb_decorator()
def adb_push(self, local_path, device_path):
async def adb_pull(self, local_path, device_path):
"""Download a file from your Android TV / Fire TV device to your Home Assistant instance."""
await self.aftv.adb_pull(local_path, device_path)
@adb_decorator()
async def adb_push(self, local_path, device_path):
"""Upload a file from your Home Assistant instance to an Android TV / Fire TV device."""
self.aftv.adb_push(local_path, device_path)
await self.aftv.adb_push(local_path, device_path)
class AndroidTVDevice(ADBDevice):
@ -628,17 +636,12 @@ class AndroidTVDevice(ADBDevice):
self._volume_level = None
@adb_decorator(override_available=True)
def update(self):
async def async_update(self):
"""Update the device state and, if necessary, re-connect."""
# Check if device is disconnected.
if not self._available:
# Try to connect
self._available = self.aftv.adb_connect(always_log_errors=False)
# To be safe, wait until the next update to run ADB commands if
# using the Python ADB implementation.
if not self.aftv.adb_server_ip:
return
self._available = await self.aftv.adb_connect(always_log_errors=False)
# If the ADB connection is not intact, don't update.
if not self._available:
@ -652,7 +655,7 @@ class AndroidTVDevice(ADBDevice):
_,
self._is_volume_muted,
self._volume_level,
) = self.aftv.update(self._get_sources)
) = await self.aftv.update(self._get_sources)
self._state = ANDROIDTV_STATES.get(state)
if self._state is None:
@ -685,53 +688,50 @@ class AndroidTVDevice(ADBDevice):
return self._volume_level
@adb_decorator()
def media_stop(self):
async def async_media_stop(self):
"""Send stop command."""
self.aftv.media_stop()
await self.aftv.media_stop()
@adb_decorator()
def mute_volume(self, mute):
async def async_mute_volume(self, mute):
"""Mute the volume."""
self.aftv.mute_volume()
await self.aftv.mute_volume()
@adb_decorator()
def set_volume_level(self, volume):
async def async_set_volume_level(self, volume):
"""Set the volume level."""
self.aftv.set_volume_level(volume)
await self.aftv.set_volume_level(volume)
@adb_decorator()
def volume_down(self):
async def async_volume_down(self):
"""Send volume down command."""
self._volume_level = self.aftv.volume_down(self._volume_level)
self._volume_level = await self.aftv.volume_down(self._volume_level)
@adb_decorator()
def volume_up(self):
async def async_volume_up(self):
"""Send volume up command."""
self._volume_level = self.aftv.volume_up(self._volume_level)
self._volume_level = await self.aftv.volume_up(self._volume_level)
class FireTVDevice(ADBDevice):
"""Representation of a Fire TV device."""
@adb_decorator(override_available=True)
def update(self):
async def async_update(self):
"""Update the device state and, if necessary, re-connect."""
# Check if device is disconnected.
if not self._available:
# Try to connect
self._available = self.aftv.adb_connect(always_log_errors=False)
# To be safe, wait until the next update to run ADB commands if
# using the Python ADB implementation.
if not self.aftv.adb_server_ip:
return
self._available = await self.aftv.adb_connect(always_log_errors=False)
# If the ADB connection is not intact, don't update.
if not self._available:
return
# Get the `state`, `current_app`, and `running_apps`.
state, self._current_app, running_apps = self.aftv.update(self._get_sources)
state, self._current_app, running_apps = await self.aftv.update(
self._get_sources
)
self._state = ANDROIDTV_STATES.get(state)
if self._state is None:
@ -754,6 +754,6 @@ class FireTVDevice(ADBDevice):
return SUPPORT_FIRETV
@adb_decorator()
def media_stop(self):
async def async_media_stop(self):
"""Send stop (back) command."""
self.aftv.back()
await self.aftv.back()

View File

@ -33,3 +33,9 @@ upload:
local_path:
description: The filepath on your Home Assistant instance.
example: "/config/www/example.txt"
learn_sendevent:
description: Translate a key press on a remote into ADB 'sendevent' commands. You must press one button on the remote within 8 seconds of calling this service.
fields:
entity_id:
description: Name(s) of Android TV / Fire TV entities.
example: "media_player.android_tv_living_room"

View File

@ -2,6 +2,6 @@
"domain": "apache_kafka",
"name": "Apache Kafka",
"documentation": "https://www.home-assistant.io/integrations/apache_kafka",
"requirements": ["aiokafka==0.5.1"],
"requirements": ["aiokafka==0.6.0"],
"codeowners": ["@bachya"]
}

View File

@ -48,7 +48,7 @@ def setup(hass, config):
try:
apcups_data.update(no_throttle=True)
except Exception: # pylint: disable=broad-except
_LOGGER.exception("Failure while testing APCUPSd status retrieval.")
_LOGGER.exception("Failure while testing APCUPSd status retrieval")
return False
return True

View File

@ -36,13 +36,13 @@ SENSOR_TYPES = {
"battv": ["Battery Voltage", VOLT, "mdi:flash"],
"bcharge": ["Battery", UNIT_PERCENTAGE, "mdi:battery"],
"cable": ["Cable Type", "", "mdi:ethernet-cable"],
"cumonbatt": ["Total Time on Battery", "", "mdi:timer"],
"cumonbatt": ["Total Time on Battery", "", "mdi:timer-outline"],
"date": ["Status Date", "", "mdi:calendar-clock"],
"dipsw": ["Dip Switch Settings", "", "mdi:information-outline"],
"dlowbatt": ["Low Battery Signal", "", "mdi:clock-alert"],
"driver": ["Driver", "", "mdi:information-outline"],
"dshutd": ["Shutdown Delay", "", "mdi:timer"],
"dwake": ["Wake Delay", "", "mdi:timer"],
"dshutd": ["Shutdown Delay", "", "mdi:timer-outline"],
"dwake": ["Wake Delay", "", "mdi:timer-outline"],
"endapc": ["Date and Time", "", "mdi:calendar-clock"],
"extbatts": ["External Batteries", "", "mdi:information-outline"],
"firmware": ["Firmware Version", "", "mdi:information-outline"],
@ -60,10 +60,10 @@ SENSOR_TYPES = {
"mandate": ["Manufacture Date", "", "mdi:calendar"],
"masterupd": ["Master Update", "", "mdi:information-outline"],
"maxlinev": ["Input Voltage High", VOLT, "mdi:flash"],
"maxtime": ["Battery Timeout", "", "mdi:timer-off"],
"maxtime": ["Battery Timeout", "", "mdi:timer-off-outline"],
"mbattchg": ["Battery Shutdown", UNIT_PERCENTAGE, "mdi:battery-alert"],
"minlinev": ["Input Voltage Low", VOLT, "mdi:flash"],
"mintimel": ["Shutdown Time", "", "mdi:timer"],
"mintimel": ["Shutdown Time", "", "mdi:timer-outline"],
"model": ["Model", "", "mdi:information-outline"],
"nombattv": ["Battery Nominal Voltage", VOLT, "mdi:flash"],
"nominv": ["Nominal Input Voltage", VOLT, "mdi:flash"],
@ -85,7 +85,7 @@ SENSOR_TYPES = {
"status": ["Status", "", "mdi:information-outline"],
"stesti": ["Self Test Interval", "", "mdi:information-outline"],
"timeleft": ["Time Left", "", "mdi:clock-alert"],
"tonbatt": ["Time on Battery", "", "mdi:timer"],
"tonbatt": ["Time on Battery", "", "mdi:timer-outline"],
"upsmode": ["Mode", "", "mdi:information-outline"],
"upsname": ["Name", "", "mdi:information-outline"],
"version": ["Daemon Info", "", "mdi:information-outline"],

View File

@ -97,7 +97,7 @@ def setup_scanner(hass, config, see, discovery_info=None):
hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, aprs_disconnect)
if not aprs_listener.start_event.wait(timeout):
_LOGGER.error("Timeout waiting for APRS to connect.")
_LOGGER.error("Timeout waiting for APRS to connect")
return
if not aprs_listener.start_success:
@ -141,7 +141,7 @@ class AprsListenerThread(threading.Thread):
try:
_LOGGER.info(
"Opening connection to %s with callsign %s.", self.host, self.callsign
"Opening connection to %s with callsign %s", self.host, self.callsign
)
self.ais.connect()
self.start_complete(
@ -152,7 +152,7 @@ class AprsListenerThread(threading.Thread):
self.start_complete(False, str(err))
except OSError:
_LOGGER.info(
"Closing connection to %s with callsign %s.", self.host, self.callsign
"Closing connection to %s with callsign %s", self.host, self.callsign
)
def stop(self):

View File

@ -0,0 +1,13 @@
{
"config": {
"step": {
"user": {
"data": {
"host": "Hostitel",
"port": "Port"
},
"description": "Zadejte n\u00e1zev hostitele nebo IP adresu za\u0159\u00edzen\u00ed."
}
}
}
}

View File

@ -9,7 +9,11 @@
"one": "Vide",
"other": "Vide"
},
"flow_title": "Arcam FMJ sur {host}",
"step": {
"confirm": {
"description": "Voulez-vous ajouter Arcam FMJ sur ` {host} ` \u00e0 HomeAssistant ?"
},
"user": {
"data": {
"host": "H\u00f4te",

View File

@ -1,11 +1,16 @@
{
"config": {
"error": {
"one": "uma",
"other": "mais"
},
"step": {
"user": {
"data": {
"host": "Servidor",
"port": "Porto"
}
},
"description": "Por favor, introduza o nome ou o endere\u00e7o IP do dispositivo."
}
}
}

View File

@ -15,7 +15,7 @@
"host": "\u0425\u043e\u0441\u0442",
"port": "\u041f\u043e\u0440\u0442"
},
"description": "\u0412\u0432\u0435\u0434\u0438\u0442\u0435 \u0438\u043c\u044f \u0445\u043e\u0441\u0442\u0430 \u0438\u043b\u0438 IP-\u0430\u0434\u0440\u0435\u0441 \u0443\u0441\u0442\u0440\u043e\u0439\u0441\u0442\u0432\u0430."
"description": "\u0412\u0432\u0435\u0434\u0438\u0442\u0435 \u0434\u043e\u043c\u0435\u043d\u043d\u043e\u0435 \u0438\u043c\u044f \u0438\u043b\u0438 IP-\u0430\u0434\u0440\u0435\u0441 \u0443\u0441\u0442\u0440\u043e\u0439\u0441\u0442\u0432\u0430."
}
}
},

View File

@ -59,7 +59,7 @@ def setup(hass, config):
if arlo_base_station is not None:
arlo_base_station.refresh_rate = scan_interval.total_seconds()
elif not arlo.cameras:
_LOGGER.error("No Arlo camera or base station available.")
_LOGGER.error("No Arlo camera or base station available")
return False
hass.data[DATA_ARLO] = arlo

View File

@ -103,7 +103,7 @@ async def async_setup(hass, config, retry_delay=FIRST_RETRY_TIME):
return True
if not api.is_connected:
_LOGGER.error("Error connecting %s to %s.", DOMAIN, conf[CONF_HOST])
_LOGGER.error("Error connecting %s to %s", DOMAIN, conf[CONF_HOST])
return False
hass.data[DATA_ASUSWRT] = api

View File

@ -54,7 +54,7 @@ class AsusWrtDeviceScanner(DeviceScanner):
self.last_results = await self.connection.async_get_connected_devices()
if self._connect_error:
self._connect_error = False
_LOGGER.error("Reconnected to ASUS router for device update")
_LOGGER.info("Reconnected to ASUS router for device update")
except OSError as err:
if not self._connect_error:

View File

@ -2,6 +2,6 @@
"domain": "asuswrt",
"name": "ASUSWRT",
"documentation": "https://www.home-assistant.io/integrations/asuswrt",
"requirements": ["aioasuswrt==1.2.6"],
"requirements": ["aioasuswrt==1.2.7"],
"codeowners": ["@kennedyshead"]
}

View File

@ -69,9 +69,7 @@ class AsuswrtSensor(Entity):
self._speed = await self._api.async_get_current_transfer_rates()
if self._connect_error:
self._connect_error = False
_LOGGER.error(
"Reconnected to ASUS router for %s update", self.entity_id
)
_LOGGER.info("Reconnected to ASUS router for %s update", self.entity_id)
except OSError as err:
if not self._connect_error:
self._connect_error = True

View File

@ -0,0 +1,12 @@
{
"config": {
"step": {
"user": {
"data": {
"host": "Hostitel",
"port": "Port"
}
}
}
}
}

View File

@ -11,7 +11,7 @@
"user": {
"data": {
"email": "Courriel (facultatif)",
"host": "H\u00f4te",
"host": "Nom d'h\u00f4te ou adresse IP",
"port": "Port (10000)"
},
"title": "Se connecter \u00e0 l'appareil"

View File

@ -3,7 +3,9 @@
"step": {
"user": {
"data": {
"host": "Servidor"
"email": "E-mail (opcional)",
"host": "Servidor",
"port": "Porta"
}
}
}

View File

@ -143,7 +143,7 @@ class AtomeData:
values = self.atome_client.get_consumption(DAILY_TYPE)
self._day_usage = values["total"] / 1000
self._day_price = values["price"]
_LOGGER.debug("Updating Atome daily data. Got: %d.", self._day_usage)
_LOGGER.debug("Updating Atome daily data. Got: %d", self._day_usage)
except KeyError as error:
_LOGGER.error("Missing last value in values: %s: %s", values, error)
@ -165,7 +165,7 @@ class AtomeData:
values = self.atome_client.get_consumption(WEEKLY_TYPE)
self._week_usage = values["total"] / 1000
self._week_price = values["price"]
_LOGGER.debug("Updating Atome weekly data. Got: %d.", self._week_usage)
_LOGGER.debug("Updating Atome weekly data. Got: %d", self._week_usage)
except KeyError as error:
_LOGGER.error("Missing last value in values: %s: %s", values, error)
@ -187,7 +187,7 @@ class AtomeData:
values = self.atome_client.get_consumption(MONTHLY_TYPE)
self._month_usage = values["total"] / 1000
self._month_price = values["price"]
_LOGGER.debug("Updating Atome monthly data. Got: %d.", self._month_usage)
_LOGGER.debug("Updating Atome monthly data. Got: %d", self._month_usage)
except KeyError as error:
_LOGGER.error("Missing last value in values: %s: %s", values, error)
@ -209,7 +209,7 @@ class AtomeData:
values = self.atome_client.get_consumption(YEARLY_TYPE)
self._year_usage = values["total"] / 1000
self._year_price = values["price"]
_LOGGER.debug("Updating Atome yearly data. Got: %d.", self._year_usage)
_LOGGER.debug("Updating Atome yearly data. Got: %d", self._year_usage)
except KeyError as error:
_LOGGER.error("Missing last value in values: %s: %s", values, error)

View File

@ -60,7 +60,7 @@ async def async_request_validation(hass, config_entry, august_gateway):
# In the future this should start a new config flow
# instead of using the legacy configurator
#
_LOGGER.error("Access token is no longer valid.")
_LOGGER.error("Access token is no longer valid")
configurator = hass.components.configurator
entry_id = config_entry.entry_id
@ -351,7 +351,7 @@ class AugustData(AugustSubscriberMixin):
doorbell_detail = self._device_detail_by_id.get(device_id)
if doorbell_detail is None:
_LOGGER.info(
"The doorbell %s could not be setup because the system could not fetch details about the doorbell.",
"The doorbell %s could not be setup because the system could not fetch details about the doorbell",
doorbell.device_name,
)
else:
@ -373,17 +373,17 @@ class AugustData(AugustSubscriberMixin):
lock_detail = self._device_detail_by_id.get(device_id)
if lock_detail is None:
_LOGGER.info(
"The lock %s could not be setup because the system could not fetch details about the lock.",
"The lock %s could not be setup because the system could not fetch details about the lock",
lock.device_name,
)
elif lock_detail.bridge is None:
_LOGGER.info(
"The lock %s could not be setup because it does not have a bridge (Connect).",
"The lock %s could not be setup because it does not have a bridge (Connect)",
lock.device_name,
)
elif not lock_detail.bridge.operative:
_LOGGER.info(
"The lock %s could not be setup because the bridge (Connect) is not operative.",
"The lock %s could not be setup because the bridge (Connect) is not operative",
lock.device_name,
)
else:

View File

@ -88,7 +88,7 @@ async def async_setup_entry(hass, config_entry, async_add_entities):
detail = data.get_device_detail(door.device_id)
if not detail.doorsense:
_LOGGER.debug(
"Not adding sensor class door for lock %s because it does not have doorsense.",
"Not adding sensor class door for lock %s because it does not have doorsense",
door.device_name,
)
continue

View File

@ -13,6 +13,8 @@ VERIFICATION_CODE_KEY = "verification_code"
NOTIFICATION_ID = "august_notification"
NOTIFICATION_TITLE = "August"
MANUFACTURER = "August Home Inc."
DEFAULT_AUGUST_CONFIG_FILE = ".august.conf"
DATA_AUGUST = "data_august"

View File

@ -5,7 +5,8 @@ import logging
from homeassistant.core import callback
from homeassistant.helpers.entity import Entity
from . import DEFAULT_NAME, DOMAIN
from . import DOMAIN
from .const import MANUFACTURER
_LOGGER = logging.getLogger(__name__)
@ -38,7 +39,7 @@ class AugustEntityMixin(Entity):
return {
"identifiers": {(DOMAIN, self._device_id)},
"name": self._device.device_name,
"manufacturer": DEFAULT_NAME,
"manufacturer": MANUFACTURER,
"sw_version": self._detail.firmware_version,
"model": self._detail.model,
}

View File

@ -0,0 +1,12 @@
{
"config": {
"step": {
"user": {
"data": {
"password": "Heslo",
"username": "U\u017eivatelsk\u00e9 jm\u00e9no"
}
}
}
}
}

View File

@ -5,7 +5,8 @@
"data": {
"password": "Palavra-passe",
"username": "Nome de Utilizador"
}
},
"description": "Se o m\u00e9todo de login for 'email', Nome do utilizador \u00e9 o endere\u00e7o de email. Se o m\u00e9todo de login for 'telefone', Nome do utilizador ser\u00e1 o n\u00famero de telefone no formato '+NNNNNNNNN'."
}
}
}

View File

@ -9,9 +9,11 @@ import voluptuous as vol
from homeassistant.const import (
ATTR_ENTITY_ID,
ATTR_NAME,
CONF_ALIAS,
CONF_DEVICE_ID,
CONF_ENTITY_ID,
CONF_ID,
CONF_MODE,
CONF_PLATFORM,
CONF_ZONE,
EVENT_HOMEASSISTANT_STARTED,
@ -23,11 +25,20 @@ from homeassistant.const import (
)
from homeassistant.core import Context, CoreState, HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import condition, extract_domain_configs, script
from homeassistant.helpers import condition, extract_domain_configs
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import ToggleEntity
from homeassistant.helpers.entity_component import EntityComponent
from homeassistant.helpers.restore_state import RestoreEntity
from homeassistant.helpers.script import (
ATTR_CUR,
ATTR_MAX,
ATTR_MODE,
CONF_MAX,
SCRIPT_MODE_SINGLE,
Script,
make_script_schema,
)
from homeassistant.helpers.service import async_register_admin_service
from homeassistant.helpers.typing import TemplateVarsType
from homeassistant.loader import bind_hass
@ -41,7 +52,6 @@ ENTITY_ID_FORMAT = DOMAIN + ".{}"
GROUP_NAME_ALL_AUTOMATIONS = "all automations"
CONF_ALIAS = "alias"
CONF_DESCRIPTION = "description"
CONF_HIDE_ENTITY = "hide_entity"
@ -96,7 +106,7 @@ _CONDITION_SCHEMA = vol.All(cv.ensure_list, [cv.CONDITION_SCHEMA])
PLATFORM_SCHEMA = vol.All(
cv.deprecated(CONF_HIDE_ENTITY, invalidation_version="0.110"),
vol.Schema(
make_script_schema(
{
# str on purpose
CONF_ID: str,
@ -107,7 +117,8 @@ PLATFORM_SCHEMA = vol.All(
vol.Required(CONF_TRIGGER): _TRIGGER_SCHEMA,
vol.Optional(CONF_CONDITION): _CONDITION_SCHEMA,
vol.Required(CONF_ACTION): cv.SCRIPT_SCHEMA,
}
},
SCRIPT_MODE_SINGLE,
),
)
@ -268,7 +279,15 @@ class AutomationEntity(ToggleEntity, RestoreEntity):
@property
def state_attributes(self):
"""Return the entity state attributes."""
return {ATTR_LAST_TRIGGERED: self._last_triggered}
attrs = {
ATTR_LAST_TRIGGERED: self._last_triggered,
ATTR_MODE: self.action_script.script_mode,
}
if self.action_script.supports_max:
attrs[ATTR_MAX] = self.action_script.max_runs
if self.is_on:
attrs[ATTR_CUR] = self.action_script.runs
return attrs
@property
def is_on(self) -> bool:
@ -334,7 +353,7 @@ class AutomationEntity(ToggleEntity, RestoreEntity):
else:
enable_automation = DEFAULT_INITIAL_STATE
_LOGGER.debug(
"Automation %s not in state storage, state %s from default is used.",
"Automation %s not in state storage, state %s from default is used",
self.entity_id,
enable_automation,
)
@ -389,7 +408,7 @@ class AutomationEntity(ToggleEntity, RestoreEntity):
try:
await self.action_script.async_run(variables, trigger_context)
except Exception: # pylint: disable=broad-except
pass
_LOGGER.exception("While executing automation %s", self.entity_id)
async def async_will_remove_from_hass(self):
"""Remove listeners when removing automation from Home Assistant."""
@ -498,8 +517,13 @@ async def _async_process_config(hass, config, component):
initial_state = config_block.get(CONF_INITIAL_STATE)
action_script = script.Script(
hass, config_block.get(CONF_ACTION, {}), name, logger=_LOGGER
action_script = Script(
hass,
config_block[CONF_ACTION],
name,
script_mode=config_block[CONF_MODE],
max_runs=config_block[CONF_MAX],
logger=_LOGGER,
)
if CONF_CONDITION in config_block:
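
A minimal sketch of reading the new run-state attributes off an automation entity, reusing the constants this file now imports from homeassistant.helpers.script; the entity id and the in-scope hass instance are assumptions for illustration:

from homeassistant.helpers.script import ATTR_CUR, ATTR_MAX, ATTR_MODE

state = hass.states.get("automation.example")  # hypothetical automation entity
mode = state.attributes.get(ATTR_MODE)         # configured script mode; SCRIPT_MODE_SINGLE is the schema default
max_runs = state.attributes.get(ATTR_MAX)      # set only when the script mode supports max runs
current_runs = state.attributes.get(ATTR_CUR)  # set only while the automation is enabled (number of active runs)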

View File

@ -10,7 +10,8 @@ from homeassistant.components.device_automation.exceptions import (
from homeassistant.config import async_log_exception, config_without_domain
from homeassistant.const import CONF_PLATFORM
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import condition, config_per_platform, script
from homeassistant.helpers import condition, config_per_platform
from homeassistant.helpers.script import async_validate_action_config
from homeassistant.loader import IntegrationNotFound
from . import CONF_ACTION, CONF_CONDITION, CONF_TRIGGER, DOMAIN, PLATFORM_SCHEMA
@ -44,10 +45,7 @@ async def async_validate_config_item(hass, config, full_config=None):
)
config[CONF_ACTION] = await asyncio.gather(
*[
script.async_validate_action_config(hass, action)
for action in config[CONF_ACTION]
]
*[async_validate_action_config(hass, action) for action in config[CONF_ACTION]]
)
return config
@ -71,19 +69,18 @@ async def _try_async_validate_config_item(hass, config, full_config=None):
async def async_validate_config(hass, config):
"""Validate config."""
validated_automations = await asyncio.gather(
*(
_try_async_validate_config_item(hass, p_config, config)
for _, p_config in config_per_platform(config, DOMAIN)
automations = list(
filter(
lambda x: x is not None,
await asyncio.gather(
*(
_try_async_validate_config_item(hass, p_config, config)
for _, p_config in config_per_platform(config, DOMAIN)
)
),
)
)
automations = [
validated_automation
for validated_automation in validated_automations
if validated_automation is not None
]
# Create a copy of the configuration with all config for current
# component removed and add validated config back in.
config = config_without_domain(config, DOMAIN)

View File

@ -14,7 +14,10 @@ from homeassistant.const import (
)
from homeassistant.core import CALLBACK_TYPE, callback
from homeassistant.helpers import condition, config_validation as cv, template
from homeassistant.helpers.event import async_track_same_state, async_track_state_change
from homeassistant.helpers.event import (
async_track_same_state,
async_track_state_change_event,
)
# mypy: allow-incomplete-defs, allow-untyped-calls, allow-untyped-defs
# mypy: no-check-untyped-defs
@ -94,8 +97,11 @@ async def async_attach_trigger(
)
@callback
def state_automation_listener(entity, from_s, to_s):
def state_automation_listener(event):
"""Listen for state changes and calls action."""
entity = event.data.get("entity_id")
from_s = event.data.get("old_state")
to_s = event.data.get("new_state")
@callback
def call_action():
@ -168,7 +174,7 @@ async def async_attach_trigger(
else:
call_action()
unsub = async_track_state_change(hass, entity_id, state_automation_listener)
unsub = async_track_state_change_event(hass, entity_id, state_automation_listener)
@callback
def async_remove():

View File

@ -6,12 +6,13 @@ from typing import Dict
import voluptuous as vol
from homeassistant import exceptions
from homeassistant.const import CONF_FOR, CONF_PLATFORM, EVENT_STATE_CHANGED, MATCH_ALL
from homeassistant.const import CONF_FOR, CONF_PLATFORM, MATCH_ALL
from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback
from homeassistant.helpers import config_validation as cv, template
from homeassistant.helpers.event import (
Event,
async_track_same_state,
async_track_state_change_event,
process_state_match,
)
@ -153,7 +154,7 @@ async def async_attach_trigger(
hass, period[entity], call_action, _check_same_state, entity_ids=entity,
)
unsub = hass.bus.async_listen(EVENT_STATE_CHANGED, state_automation_listener)
unsub = async_track_state_change_event(hass, entity_id, state_automation_listener)
@callback
def async_remove():

View File

@ -1,16 +1,10 @@
"""Offer zone automation rules."""
import voluptuous as vol
from homeassistant.const import (
CONF_ENTITY_ID,
CONF_EVENT,
CONF_PLATFORM,
CONF_ZONE,
MATCH_ALL,
)
from homeassistant.const import CONF_ENTITY_ID, CONF_EVENT, CONF_PLATFORM, CONF_ZONE
from homeassistant.core import callback
from homeassistant.helpers import condition, config_validation as cv, location
from homeassistant.helpers.event import async_track_state_change
from homeassistant.helpers.event import async_track_state_change_event
# mypy: allow-untyped-defs, no-check-untyped-defs
@ -37,8 +31,12 @@ async def async_attach_trigger(hass, config, action, automation_info):
event = config.get(CONF_EVENT)
@callback
def zone_automation_listener(entity, from_s, to_s):
def zone_automation_listener(zone_event):
"""Listen for state changes and calls action."""
entity = zone_event.data.get("entity_id")
from_s = zone_event.data.get("old_state")
to_s = zone_event.data.get("new_state")
if (
from_s
and not location.has_location(from_s)
@ -74,6 +72,4 @@ async def async_attach_trigger(hass, config, action, automation_info):
)
)
return async_track_state_change(
hass, entity_id, zone_automation_listener, MATCH_ALL, MATCH_ALL
)
return async_track_state_change_event(hass, entity_id, zone_automation_listener)

View File

@ -22,7 +22,7 @@ async def async_setup_entry(
integration_id = entry.data[CONF_ID]
try:
each_upcoming = client.upcoming_of_each()
each_upcoming = await hass.async_add_executor_job(client.upcoming_of_each)
except AvriException as ex:
raise PlatformNotReady from ex
else:

View File

@ -4,11 +4,13 @@
"already_configured": "D\u00ebs Adress ass scho konfigur\u00e9iert."
},
"error": {
"invalid_country_code": "Onbekannte Zweestellege L\u00e4nner Code",
"invalid_house_number": "Ong\u00eblteg Haus Nummer"
},
"step": {
"user": {
"data": {
"country_code": "Zweestellege L\u00e4nner Code",
"house_number": "Haus Nummer",
"house_number_extension": "Haus Nummer Extensioun",
"zip_code": "Postleitzuel"

View File

@ -0,0 +1,16 @@
{
"config": {
"error": {
"invalid_house_number": "Nieprawid\u0142owy numer domu"
},
"step": {
"user": {
"data": {
"country_code": "Dwuliterowy kod kraju",
"house_number": "Numer domu",
"zip_code": "Kod pocztowy"
}
}
}
}
}

View File

@ -0,0 +1,11 @@
{
"config": {
"step": {
"user": {
"data": {
"zip_code": "C\u00f3digo postal"
}
}
}
}
}

View File

@ -21,7 +21,8 @@
"data": {
"access_token": "Token d'acc\u00e9s",
"email": "Correu electr\u00f2nic"
}
},
"description": "T'has de registrar a Awair per a obtenir un token d'acc\u00e9s de desenvolupador a trav\u00e9s de l'enlla\u00e7 seg\u00fcent: https://developer.getawair.com/onboard/login"
}
}
}

View File

@ -0,0 +1,26 @@
{
"config": {
"abort": {
"already_configured": "\u00da\u010det je ji\u017e nakonfigurov\u00e1n",
"no_devices": "V s\u00edti nebyla nalezena \u017e\u00e1dn\u00e1 za\u0159\u00edzen\u00ed.",
"reauth_successful": "P\u0159\u00edstupov\u00fd token \u00fasp\u011b\u0161n\u011b aktualizov\u00e1n"
},
"error": {
"auth": "Neplatn\u00fd p\u0159\u00edstupov\u00fd token"
},
"step": {
"reauth": {
"data": {
"access_token": "P\u0159\u00edstupov\u00fd token",
"email": "E-mail"
}
},
"user": {
"data": {
"access_token": "P\u0159\u00edstupov\u00fd token",
"email": "E-mail"
}
}
}
}
}

View File

@ -0,0 +1,12 @@
{
"config": {
"error": {
"unknown": "Unbekannter Awair-API-Fehler."
},
"step": {
"reauth": {
"description": "Bitte geben Sie Ihr Awair-Entwicklerzugriffstoken erneut ein."
}
}
}
}

View File

@ -0,0 +1,29 @@
{
"config": {
"abort": {
"already_configured": "Le compte est d\u00e9j\u00e0 configur\u00e9",
"no_devices": "Pas d'appareil trouv\u00e9 sur le r\u00e9seau",
"reauth_successful": "Jeton d'acc\u00e8s mis \u00e0 jour avec succ\u00e8s"
},
"error": {
"auth": "Jeton d'acc\u00e8s invalide",
"unknown": "Erreur d'API Awair inconnue."
},
"step": {
"reauth": {
"data": {
"access_token": "Jeton d'acc\u00e8s",
"email": "Email"
},
"description": "Veuillez ressaisir votre jeton d'acc\u00e8s d\u00e9veloppeur Awair."
},
"user": {
"data": {
"access_token": "Jeton d'acc\u00e8s",
"email": "Email"
},
"description": "Vous devez vous inscrire pour un jeton d'acc\u00e8s d\u00e9veloppeur Awair sur: https://developer.getawair.com/onboard/login"
}
}
}
}

View File

@ -0,0 +1,29 @@
{
"config": {
"abort": {
"already_configured": "L'account \u00e8 gi\u00e0 configurato",
"no_devices": "Nessun dispositivo trovato sulla rete",
"reauth_successful": "Token di accesso aggiornato correttamente"
},
"error": {
"auth": "Token di accesso non valido",
"unknown": "Errore API Awair sconosciuto."
},
"step": {
"reauth": {
"data": {
"access_token": "Token di accesso",
"email": "E-mail"
},
"description": "Inserisci nuovamente il tuo token di accesso per sviluppatori Awair."
},
"user": {
"data": {
"access_token": "Token di accesso",
"email": "E-mail"
},
"description": "\u00c8 necessario registrarsi per un token di accesso per sviluppatori Awair all'indirizzo: https://developer.getawair.com/onboard/login"
}
}
}
}

View File

@ -0,0 +1,29 @@
{
"config": {
"abort": {
"already_configured": "\uacc4\uc815\uc774 \uc774\ubbf8 \uad6c\uc131\ub418\uc5c8\uc2b5\ub2c8\ub2e4.",
"no_devices": "\ub124\ud2b8\uc6cc\ud06c\uc5d0\uc11c \uae30\uae30\ub97c \ucc3e\uc744 \uc218 \uc5c6\uc2b5\ub2c8\ub2e4",
"reauth_successful": "\uc561\uc138\uc2a4 \ud1a0\ud070\uc774 \uc131\uacf5\uc801\uc73c\ub85c \uc5c5\ub370\uc774\ud2b8\ub418\uc5c8\uc2b5\ub2c8\ub2e4"
},
"error": {
"auth": "\uc561\uc138\uc2a4 \ud1a0\ud070\uc774 \uc798\ubabb\ub418\uc5c8\uc2b5\ub2c8\ub2e4",
"unknown": "\uc54c \uc218 \uc5c6\ub294 Awair API \uc624\ub958\uac00 \ubc1c\uc0dd\ud588\uc2b5\ub2c8\ub2e4."
},
"step": {
"reauth": {
"data": {
"access_token": "\uc561\uc138\uc2a4 \ud1a0\ud070",
"email": "\uc774\uba54\uc77c"
},
"description": "Awair \uac1c\ubc1c\uc790 \uc561\uc138\uc2a4 \ud1a0\ud070\uc744 \ub2e4\uc2dc \uc785\ub825\ud574\uc8fc\uc138\uc694."
},
"user": {
"data": {
"access_token": "\uc561\uc138\uc2a4 \ud1a0\ud070",
"email": "\uc774\uba54\uc77c"
},
"description": "https://developer.getawair.com/onboard/login \uc5d0 Awair \uac1c\ubc1c\uc790 \uc561\uc138\uc2a4 \ud1a0\ud070\uc744 \ub4f1\ub85d\ud574\uc57c\ud569\ub2c8\ub2e4"
}
}
}
}

View File

@ -0,0 +1,29 @@
{
"config": {
"abort": {
"already_configured": "Kont ass",
"no_devices": "Keng Apparater am Netzwierk fonnt",
"reauth_successful": "Acc\u00e8s Jeton erfollegr\u00e4ich aktualis\u00e9iert"
},
"error": {
"auth": "Ong\u00ebltege Acc\u00e8s Jeton",
"unknown": "Onbekannten Awair API Feeler"
},
"step": {
"reauth": {
"data": {
"access_token": "Acc\u00e8s Jeton",
"email": "E-Mail"
},
"description": "G\u00ebff d\u00e4in Awair Developpeur Acc\u00e8s jeton nach emol un."
},
"user": {
"data": {
"access_token": "Acc\u00e8s Jeton",
"email": "E-Mail"
},
"description": "Du muss dech fir een Awair Developpeur Acc\u00e8s Jeton registr\u00e9ien op:\nhttps://developer.getawair.com/onboard/login"
}
}
}
}

View File

@ -0,0 +1,25 @@
{
"config": {
"abort": {
"already_configured": "Conta j\u00e1 configurada",
"no_devices": "Nenhum dispositivo encontrado na rede",
"reauth_successful": "Token de Acesso actualizado com sucesso"
},
"error": {
"auth": "Token de acesso inv\u00e1lido"
},
"step": {
"reauth": {
"data": {
"email": "Email"
}
},
"user": {
"data": {
"access_token": "Token de Acesso",
"email": "Email"
}
}
}
}
}

View File

@ -3,5 +3,5 @@
"name": "Amazon Web Services (AWS)",
"documentation": "https://www.home-assistant.io/integrations/aws",
"requirements": ["aiobotocore==0.11.1"],
"codeowners": ["@awarecan", "@robbiet480"]
"codeowners": ["@awarecan"]
}

View File

@ -1,5 +1,15 @@
{
"config": {
"flow_title": "Za\u0159\u00edzen\u00ed Axis: {name} ({host})"
"flow_title": "Za\u0159\u00edzen\u00ed Axis: {name} ({host})",
"step": {
"user": {
"data": {
"host": "Hostitel",
"password": "Heslo",
"port": "Port",
"username": "U\u017eivatelsk\u00e9 jm\u00e9no"
}
}
}
}
}

View File

@ -16,7 +16,7 @@
"step": {
"user": {
"data": {
"host": "H\u00f4te",
"host": "Nom d'h\u00f4te ou adresse IP",
"password": "Mot de passe",
"port": "Port",
"username": "Nom d'utilisateur"

Some files were not shown because too many files have changed in this diff.