Upgrade black to 20.8b1 (#39287)

parent 0d7eec710c
commit 1c2ebdf307
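Most of the hunks below are mechanical reformatting caused by black 20.8b1's new handling of the "magic trailing comma": a call or collection that already ends with a trailing comma is now exploded to one element per line instead of being kept on a single continuation line. A minimal, self-contained sketch of the behaviour follows; the helper and values are illustrative and not taken from this commit.

# Stand-in helper so the snippet runs on its own; not part of the commit.
def make_entry(title, data):
    return {"title": title, "data": data}


host, port, name = "192.0.2.1", 80, "Demo"

# black 19.10b0 kept a split call compact, leaving the trailing comma in place:
entry_old_style = make_entry(
    title=f"{name} ({host})", data={"host": host, "port": port},
)

# black 20.8b1 treats the pre-existing trailing comma as a request to explode
# the call, one argument per line -- the dominant pattern in this diff:
entry_new_style = make_entry(
    title=f"{name} ({host})",
    data={"host": host, "port": port},
)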
@@ -5,7 +5,7 @@ repos:
       - id: pyupgrade
         args: [--py37-plus]
   - repo: https://github.com/psf/black
-    rev: 19.10b0
+    rev: 20.8b1
     hooks:
       - id: black
         args:
@@ -312,9 +312,7 @@ class AuthManager:
 
         if provider is not None and hasattr(provider, "async_will_remove_credentials"):
             # https://github.com/python/mypy/issues/1424
-            await provider.async_will_remove_credentials(  # type: ignore
-                credentials
-            )
+            await provider.async_will_remove_credentials(credentials)  # type: ignore
 
         await self._store.async_remove_credentials(credentials)
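A few hunks go the other way: where the reformatted call fits within the line limit, black 20.8b1 joins it back onto one line and the trailing "# type: ignore" comment moves to the end of that line, as in the AuthManager change above. A self-contained sketch with illustrative names:

async def remove_credentials(provider, credentials):
    # 19.10b0 layout:
    #     await provider.async_will_remove_credentials(  # type: ignore
    #         credentials
    #     )
    # 20.8b1 layout:
    await provider.async_will_remove_credentials(credentials)  # type: ignore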
@@ -48,7 +48,10 @@ class User:
     )
 
     _permissions: Optional[perm_mdl.PolicyPermissions] = attr.ib(
-        init=False, eq=False, order=False, default=None,
+        init=False,
+        eq=False,
+        order=False,
+        default=None,
     )
 
     @property
@@ -112,7 +112,8 @@ async def async_setup_hass(
         config_dict = await conf_util.async_hass_config_yaml(hass)
     except HomeAssistantError as err:
         _LOGGER.error(
-            "Failed to parse configuration.yaml: %s. Activating safe mode", err,
+            "Failed to parse configuration.yaml: %s. Activating safe mode",
+            err,
         )
     else:
         if not is_virtual_env():
@@ -160,7 +161,8 @@ async def async_setup_hass(
         http_conf = (await http.async_get_last_config(hass)) or {}
 
         await async_from_config_dict(
-            {"safe_mode": {}, "http": http_conf}, hass,
+            {"safe_mode": {}, "http": http_conf},
+            hass,
         )
 
     if runtime_config.open_ui:
@@ -331,8 +333,10 @@ def async_enable_logging(
 ):
 
     if log_rotate_days:
-        err_handler: logging.FileHandler = logging.handlers.TimedRotatingFileHandler(
-            err_log_path, when="midnight", backupCount=log_rotate_days
+        err_handler: logging.FileHandler = (
+            logging.handlers.TimedRotatingFileHandler(
+                err_log_path, when="midnight", backupCount=log_rotate_days
+            )
         )
     else:
         err_handler = logging.FileHandler(err_log_path, mode="w", delay=True)
@@ -391,7 +395,8 @@ async def _async_log_pending_setups(
 
     if remaining:
         _LOGGER.warning(
-            "Waiting on integrations to complete setup: %s", ", ".join(remaining),
+            "Waiting on integrations to complete setup: %s",
+            ", ".join(remaining),
         )
@@ -37,5 +37,6 @@ async def update_devices(hass, config_entry, api):
         )
         if device is not None:
             dev_registry.async_update_device(
-                device.id, name=api_item.name,
+                device.id,
+                name=api_item.name,
             )
@@ -220,7 +220,8 @@ async def async_setup_entry(hass, config_entry):
         )
     else:
         api_coro = client.api.nearest_city(
-            config_entry.data[CONF_LATITUDE], config_entry.data[CONF_LONGITUDE],
+            config_entry.data[CONF_LATITUDE],
+            config_entry.data[CONF_LONGITUDE],
         )
 
     try:
@@ -98,7 +98,13 @@ async def async_setup_entry(hass, config_entry, async_add_entities):
     if config_entry.data[CONF_INTEGRATION_TYPE] == INTEGRATION_TYPE_GEOGRAPHY:
         sensors = [
             AirVisualGeographySensor(
-                coordinator, config_entry, kind, name, icon, unit, locale,
+                coordinator,
+                config_entry,
+                kind,
+                name,
+                icon,
+                unit,
+                locale,
             )
             for locale in GEOGRAPHY_SENSOR_LOCALES
             for kind, name, icon, unit in GEOGRAPHY_SENSORS
@@ -108,8 +108,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: config_entries.ConfigEnt
         auth = AlmondLocalAuth(entry.data["host"], websession)
     else:
         # OAuth2
-        implementation = await config_entry_oauth2_flow.async_get_config_entry_implementation(
-            hass, entry
+        implementation = (
+            await config_entry_oauth2_flow.async_get_config_entry_implementation(
+                hass, entry
+            )
         )
         oauth_session = config_entry_oauth2_flow.OAuth2Session(
             hass, entry, implementation
@@ -597,7 +597,8 @@ class ADBDevice(MediaPlayerEntity):
 
         msg = f"Output from service '{SERVICE_LEARN_SENDEVENT}' from {self.entity_id}: '{output}'"
         self.hass.components.persistent_notification.async_create(
-            msg, title="Android TV",
+            msg,
+            title="Android TV",
         )
         _LOGGER.info("%s", msg)
@@ -42,7 +42,8 @@ class ArcamFmjFlowHandler(config_entries.ConfigFlow):
         await client.stop()
 
         return self.async_create_entry(
-            title=f"{DEFAULT_NAME} ({host})", data={CONF_HOST: host, CONF_PORT: port},
+            title=f"{DEFAULT_NAME} ({host})",
+            data={CONF_HOST: host, CONF_PORT: port},
         )
 
     async def async_step_user(self, user_input=None):
@@ -60,7 +60,10 @@ class ArcamFmj(MediaPlayerEntity):
     """Representation of a media device."""
 
     def __init__(
-        self, device_name, state: State, uuid: str,
+        self,
+        device_name,
+        state: State,
+        uuid: str,
     ):
         """Initialize device."""
         self._state = state
@@ -30,7 +30,9 @@ DATA_SCHEMA = vol.Schema(
 
 
 async def async_validate_input(
-    hass: core.HomeAssistant, data, august_gateway,
+    hass: core.HomeAssistant,
+    data,
+    august_gateway,
 ):
     """Validate the user input allows us to connect.
 
@@ -89,7 +91,9 @@ class AugustConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
 
             try:
                 info = await async_validate_input(
-                    self.hass, user_input, self._august_gateway,
+                    self.hass,
+                    user_input,
+                    self._august_gateway,
                 )
                 await self.async_set_unique_id(user_input[CONF_USERNAME])
                 return self.async_create_entry(title=info["title"], data=info["data"])
@@ -122,8 +122,8 @@ class AugustGateway:
         """Refresh the august access token if needed."""
         if self.authenticator.should_refresh():
             async with self._token_refresh_lock:
-                refreshed_authentication = await self.authenticator.async_refresh_access_token(
-                    force=False
+                refreshed_authentication = (
+                    await self.authenticator.async_refresh_access_token(force=False)
                 )
                 _LOGGER.info(
                     "Refreshed august access token. The old token expired at %s, and the new token expires at %s",
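Another recurring pattern, visible in the AugustGateway and Almond hunks: when an assignment's right-hand side is a long call, black 20.8b1 prefers wrapping the whole right-hand side in parentheses over splitting the call's arguments. A sketch under assumed names; the long method name exists only to push the line past the 88-character limit.

# Hypothetical async client used purely for illustration.
class DemoAuthenticator:
    async def async_refresh_access_token_with_long_name(self, force=False):
        return {"expires": "later", "forced": force}


async def refresh(authenticator: DemoAuthenticator):
    # 19.10b0 split the arguments of the call itself:
    #     refreshed_authentication = await authenticator.async_refresh_access_token_with_long_name(
    #         force=False
    #     )
    # 20.8b1 wraps the whole right-hand side instead:
    refreshed_authentication = (
        await authenticator.async_refresh_access_token_with_long_name(force=False)
    )
    return refreshed_authentication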
@@ -70,7 +70,8 @@ async def async_setup_entry(hass, config_entry, async_add_entities):
             )
             continue
         _LOGGER.debug(
-            "Adding battery sensor for %s", device.device_name,
+            "Adding battery sensor for %s",
+            device.device_name,
         )
         devices.append(AugustBatterySensor(data, "device_battery", device, device))
 
@@ -84,7 +85,8 @@ async def async_setup_entry(hass, config_entry, async_add_entities):
             )
             continue
         _LOGGER.debug(
-            "Adding keypad battery sensor for %s", device.device_name,
+            "Adding keypad battery sensor for %s",
+            device.device_name,
         )
         keypad_battery_sensor = AugustBatterySensor(
             data, "linked_keypad_battery", detail.keypad, device
@@ -32,7 +32,9 @@ class AvriConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
     async def _show_setup_form(self, errors=None):
         """Show the setup form to the user."""
         return self.async_show_form(
-            step_id="user", data_schema=DATA_SCHEMA, errors=errors or {},
+            step_id="user",
+            data_schema=DATA_SCHEMA,
+            errors=errors or {},
         )
 
     async def async_step_user(self, user_input=None):
@@ -96,7 +96,9 @@ class AwairDataUpdateCoordinator(DataUpdateCoordinator):
         if not matching_flows:
             self.hass.async_create_task(
                 self.hass.config_entries.flow.async_init(
-                    DOMAIN, context=flow_context, data=self._config_entry.data,
+                    DOMAIN,
+                    context=flow_context,
+                    data=self._config_entry.data,
                 )
             )
 
@@ -32,7 +32,8 @@ from .const import (
 )
 
 PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
-    {vol.Required(CONF_ACCESS_TOKEN): cv.string}, extra=vol.ALLOW_EXTRA,
+    {vol.Required(CONF_ACCESS_TOKEN): cv.string},
+    extra=vol.ALLOW_EXTRA,
 )
 
 
@@ -43,7 +44,9 @@ async def async_setup_platform(hass, config, async_add_entities, discovery_info=
         )
     hass.async_create_task(
         hass.config_entries.flow.async_init(
-            DOMAIN, context={"source": SOURCE_IMPORT}, data=config,
+            DOMAIN,
+            context={"source": SOURCE_IMPORT},
+            data=config,
         )
     )
 
@@ -83,7 +86,10 @@ class AwairSensor(Entity):
     """Defines an Awair sensor entity."""
 
     def __init__(
-        self, kind: str, device: AwairDevice, coordinator: AwairDataUpdateCoordinator,
+        self,
+        kind: str,
+        device: AwairDevice,
+        coordinator: AwairDataUpdateCoordinator,
     ) -> None:
         """Set up an individual AwairSensor."""
         self._kind = kind
@@ -54,8 +54,8 @@ class AxisLight(AxisEventBase, LightEntity):
 
         def get_light_capabilities():
             """Get light capabilities."""
-            current_intensity = self.device.api.vapix.light_control.get_current_intensity(
-                self.light_id
+            current_intensity = (
+                self.device.api.vapix.light_control.get_current_intensity(self.light_id)
             )
             self.current_intensity = current_intensity["data"]["intensity"]
 
@@ -38,7 +38,9 @@ async def async_setup_entry(hass: HomeAssistantType, entry: ConfigEntry) -> bool
         )
         hass.async_create_task(
             hass.config_entries.flow.async_init(
-                DOMAIN, context={"source": "reauth"}, data=entry.data,
+                DOMAIN,
+                context={"source": "reauth"},
+                data=entry.data,
             )
         )
         return False
@@ -115,7 +117,13 @@ class AzureDevOpsDeviceEntity(AzureDevOpsEntity):
     def device_info(self) -> Dict[str, Any]:
         """Return device information about this Azure DevOps instance."""
         return {
-            "identifiers": {(DOMAIN, self.organization, self.project,)},
+            "identifiers": {
+                (
+                    DOMAIN,
+                    self.organization,
+                    self.project,
+                )
+            },
             "manufacturer": self.organization,
             "name": self.project,
         }
@@ -103,7 +103,8 @@ async def async_setup_entry(hass, entry):
         """Call blink to send new pin."""
         pin = call.data[CONF_PIN]
         hass.data[DOMAIN][entry.entry_id].auth.send_auth_key(
-            hass.data[DOMAIN][entry.entry_id], pin,
+            hass.data[DOMAIN][entry.entry_id],
+            pin,
         )
 
     hass.services.async_register(DOMAIN, SERVICE_REFRESH, blink_refresh)
@@ -86,7 +86,9 @@ class BlinkConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
         }
 
         return self.async_show_form(
-            step_id="user", data_schema=vol.Schema(data_schema), errors=errors,
+            step_id="user",
+            data_schema=vol.Schema(data_schema),
+            errors=errors,
         )
 
     async def async_step_2fa(self, user_input=None):
@@ -156,7 +158,12 @@ class BlinkOptionsFlowHandler(config_entries.OptionsFlow):
         return self.async_show_form(
             step_id="simple_options",
             data_schema=vol.Schema(
-                {vol.Optional(CONF_SCAN_INTERVAL, default=scan_interval,): int}
+                {
+                    vol.Optional(
+                        CONF_SCAN_INTERVAL,
+                        default=scan_interval,
+                    ): int
+                }
             ),
         )
 
@@ -107,7 +107,9 @@ class BroadlinkFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
             vol.Optional(CONF_TIMEOUT, default=DEFAULT_TIMEOUT): cv.positive_int,
         }
         return self.async_show_form(
-            step_id="user", data_schema=vol.Schema(data_schema), errors=errors,
+            step_id="user",
+            data_schema=vol.Schema(data_schema),
+            errors=errors,
         )
 
     async def async_step_auth(self):
@@ -71,7 +71,10 @@ class BrotherDataUpdateCoordinator(DataUpdateCoordinator):
         self.brother = Brother(host, kind=kind)
 
         super().__init__(
-            hass, _LOGGER, name=DOMAIN, update_interval=SCAN_INTERVAL,
+            hass,
+            _LOGGER,
+            name=DOMAIN,
+            update_interval=SCAN_INTERVAL,
         )
 
     async def _async_update_data(self):
@@ -86,7 +86,10 @@ class BSBLanClimate(ClimateEntity):
     """Defines a BSBLan climate device."""
 
     def __init__(
-        self, entry_id: str, bsblan: BSBLan, info: Info,
+        self,
+        entry_id: str,
+        bsblan: BSBLan,
+        info: Info,
     ):
         """Initialize BSBLan climate device."""
         self._current_temperature: Optional[float] = None
@@ -65,7 +65,10 @@ class CertExpiryDataUpdateCoordinator(DataUpdateCoordinator[datetime]):
         name = f"{self.host}{display_port}"
 
         super().__init__(
-            hass, _LOGGER, name=name, update_interval=SCAN_INTERVAL,
+            hass,
+            _LOGGER,
+            name=name,
+            update_interval=SCAN_INTERVAL,
         )
 
     async def _async_update_data(self) -> Optional[datetime]:
@@ -60,7 +60,8 @@ class CertexpiryConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
             title_port = f":{port}" if port != DEFAULT_PORT else ""
             title = f"{host}{title_port}"
             return self.async_create_entry(
-                title=title, data={CONF_HOST: host, CONF_PORT: port},
+                title=title,
+                data={CONF_HOST: host, CONF_PORT: port},
             )
         if (  # pylint: disable=no-member
             self.context["source"] == config_entries.SOURCE_IMPORT
@@ -69,13 +69,19 @@ async def async_setup_platform(hass, config, async_add_entities, discovery_info=
     platform = entity_platform.current_platform.get()
 
     platform.async_register_entity_service(
-        SERVICE_SEEK_FORWARD, {}, "seek_forward",
+        SERVICE_SEEK_FORWARD,
+        {},
+        "seek_forward",
     )
     platform.async_register_entity_service(
-        SERVICE_SEEK_BACKWARD, {}, "seek_backward",
+        SERVICE_SEEK_BACKWARD,
+        {},
+        "seek_backward",
     )
     platform.async_register_entity_service(
-        SERVICE_SEEK_BY, {vol.Required(ATTR_SECONDS): vol.Coerce(int)}, "seek_by",
+        SERVICE_SEEK_BY,
+        {vol.Required(ATTR_SECONDS): vol.Coerce(int)},
+        "seek_by",
     )
 
 
@@ -72,7 +72,10 @@ _CLOUD_ERRORS = {
         "Remote UI not compatible with 127.0.0.1/::1 as trusted proxies.",
     ),
     asyncio.TimeoutError: (502, "Unable to reach the Home Assistant cloud."),
-    aiohttp.ClientError: (HTTP_INTERNAL_SERVER_ERROR, "Error making internal request",),
+    aiohttp.ClientError: (
+        HTTP_INTERNAL_SERVER_ERROR,
+        "Error making internal request",
+    ),
 }
 
 
@@ -36,7 +36,8 @@ async def async_setup_entry(
     entry_data = hass.data[DOMAIN][entry.entry_id]
     scan_interval = entry_data[CONF_SCAN_INTERVAL]
     _LOGGER.debug(
-        "Scan interval = %s", scan_interval,
+        "Scan interval = %s",
+        scan_interval,
     )
 
     async def async_update_data_non_dimmer():
@@ -95,7 +96,8 @@ async def async_setup_entry(
                 continue
             except KeyError:
                 _LOGGER.exception(
-                    "Unknown device properties received from Control4: %s", item,
+                    "Unknown device properties received from Control4: %s",
+                    item,
                 )
                 continue
 
@@ -61,7 +61,10 @@ class CoolmasterDataUpdateCoordinator(DataUpdateCoordinator):
         self._coolmaster = coolmaster
 
        super().__init__(
-            hass, _LOGGER, name=DOMAIN, update_interval=SCAN_INTERVAL,
+            hass,
+            _LOGGER,
+            name=DOMAIN,
+            update_interval=SCAN_INTERVAL,
         )
 
     async def _async_update_data(self):
@@ -338,5 +338,6 @@ class CoverDevice(CoverEntity):
         """Print deprecation warning."""
         super().__init_subclass__(**kwargs)
         _LOGGER.warning(
-            "CoverDevice is deprecated, modify %s to extend CoverEntity", cls.__name__,
+            "CoverDevice is deprecated, modify %s to extend CoverEntity",
+            cls.__name__,
         )
@ -85,17 +85,23 @@ class FlowHandler(config_entries.ConfigFlow):
|
|||
)
|
||||
except web_exceptions.HTTPForbidden:
|
||||
return self.async_show_form(
|
||||
step_id="user", data_schema=self.schema, errors={"base": "forbidden"},
|
||||
step_id="user",
|
||||
data_schema=self.schema,
|
||||
errors={"base": "forbidden"},
|
||||
)
|
||||
except ClientError:
|
||||
_LOGGER.exception("ClientError")
|
||||
return self.async_show_form(
|
||||
step_id="user", data_schema=self.schema, errors={"base": "device_fail"},
|
||||
step_id="user",
|
||||
data_schema=self.schema,
|
||||
errors={"base": "device_fail"},
|
||||
)
|
||||
except Exception: # pylint: disable=broad-except
|
||||
_LOGGER.exception("Unexpected error creating device")
|
||||
return self.async_show_form(
|
||||
step_id="user", data_schema=self.schema, errors={"base": "device_fail"},
|
||||
step_id="user",
|
||||
data_schema=self.schema,
|
||||
errors={"base": "device_fail"},
|
||||
)
|
||||
|
||||
mac = device.mac
|
||||
|
|
|
@ -62,7 +62,8 @@ def retry(method):
|
|||
return method(device, *args, **kwargs)
|
||||
except (decora.decoraException, AttributeError, BTLEException):
|
||||
_LOGGER.warning(
|
||||
"Decora connect error for device %s. Reconnecting...", device.name,
|
||||
"Decora connect error for device %s. Reconnecting...",
|
||||
device.name,
|
||||
)
|
||||
# pylint: disable=protected-access
|
||||
device._switch.connect()
|
||||
|
|
|
@ -59,7 +59,8 @@ class OptionsFlowHandler(config_entries.OptionsFlow):
|
|||
default=self.config_entry.options.get(CONF_BOOLEAN, False),
|
||||
): bool,
|
||||
vol.Optional(
|
||||
CONF_INT, default=self.config_entry.options.get(CONF_INT, 10),
|
||||
CONF_INT,
|
||||
default=self.config_entry.options.get(CONF_INT, 10),
|
||||
): int,
|
||||
}
|
||||
),
|
||||
|
@ -77,7 +78,10 @@ class OptionsFlowHandler(config_entries.OptionsFlow):
|
|||
{
|
||||
vol.Optional(
|
||||
CONF_STRING,
|
||||
default=self.config_entry.options.get(CONF_STRING, "Default",),
|
||||
default=self.config_entry.options.get(
|
||||
CONF_STRING,
|
||||
"Default",
|
||||
),
|
||||
): str,
|
||||
vol.Optional(
|
||||
CONF_SELECT,
|
||||
|
|
|
@ -50,8 +50,8 @@ class DevoloCoverDeviceEntity(DevoloDeviceEntity, CoverEntity):
|
|||
sync=self._sync,
|
||||
)
|
||||
|
||||
self._multi_level_switch_property = device_instance.multi_level_switch_property.get(
|
||||
element_uid
|
||||
self._multi_level_switch_property = (
|
||||
device_instance.multi_level_switch_property.get(element_uid)
|
||||
)
|
||||
|
||||
self._position = self._multi_level_switch_property.value
|
||||
|
|
|
@ -44,7 +44,9 @@ async def async_setup(hass: HomeAssistant, config: Dict) -> bool:
|
|||
for entry_config in config[DOMAIN]:
|
||||
hass.async_create_task(
|
||||
hass.config_entries.flow.async_init(
|
||||
DOMAIN, context={"source": SOURCE_IMPORT}, data=entry_config,
|
||||
DOMAIN,
|
||||
context={"source": SOURCE_IMPORT},
|
||||
data=entry_config,
|
||||
)
|
||||
)
|
||||
|
||||
|
|
|
@ -121,7 +121,8 @@ class DirecTVConfigFlow(ConfigFlow, domain=DOMAIN):
|
|||
)
|
||||
|
||||
return self.async_create_entry(
|
||||
title=self.discovery_info[CONF_NAME], data=self.discovery_info,
|
||||
title=self.discovery_info[CONF_NAME],
|
||||
data=self.discovery_info,
|
||||
)
|
||||
|
||||
def _show_setup_form(self, errors: Optional[Dict] = None) -> Dict[str, Any]:
|
||||
|
|
|
@ -70,7 +70,9 @@ async def async_setup_entry(
|
|||
for location in dtv.device.locations:
|
||||
entities.append(
|
||||
DIRECTVMediaPlayer(
|
||||
dtv=dtv, name=str.title(location.name), address=location.address,
|
||||
dtv=dtv,
|
||||
name=str.title(location.name),
|
||||
address=location.address,
|
||||
)
|
||||
)
|
||||
|
||||
|
@ -83,7 +85,9 @@ class DIRECTVMediaPlayer(DIRECTVEntity, MediaPlayerEntity):
|
|||
def __init__(self, *, dtv: DIRECTV, name: str, address: str = "0") -> None:
|
||||
"""Initialize DirecTV media player."""
|
||||
super().__init__(
|
||||
dtv=dtv, name=name, address=address,
|
||||
dtv=dtv,
|
||||
name=name,
|
||||
address=address,
|
||||
)
|
||||
|
||||
self._assumed_state = None
|
||||
|
|
|
@ -29,7 +29,9 @@ async def async_setup_entry(
|
|||
for location in dtv.device.locations:
|
||||
entities.append(
|
||||
DIRECTVRemote(
|
||||
dtv=dtv, name=str.title(location.name), address=location.address,
|
||||
dtv=dtv,
|
||||
name=str.title(location.name),
|
||||
address=location.address,
|
||||
)
|
||||
)
|
||||
|
||||
|
@ -42,7 +44,9 @@ class DIRECTVRemote(DIRECTVEntity, RemoteEntity):
|
|||
def __init__(self, *, dtv: DIRECTV, name: str, address: str = "0") -> None:
|
||||
"""Initialize DirecTV remote."""
|
||||
super().__init__(
|
||||
dtv=dtv, name=name, address=address,
|
||||
dtv=dtv,
|
||||
name=name,
|
||||
address=address,
|
||||
)
|
||||
|
||||
self._available = False
|
||||
|
|
|
@ -32,7 +32,8 @@ class UKFloodsFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
|
|||
await self.async_set_unique_id(station, raise_on_progress=False)
|
||||
self._abort_if_unique_id_configured()
|
||||
return self.async_create_entry(
|
||||
title=user_input["station"], data={"station": station},
|
||||
title=user_input["station"],
|
||||
data={"station": station},
|
||||
)
|
||||
|
||||
session = async_get_clientsession(hass=self.hass)
|
||||
|
|
|
@ -24,7 +24,11 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
|||
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
"""Set up Elgato Key Light from a config entry."""
|
||||
session = async_get_clientsession(hass)
|
||||
elgato = Elgato(entry.data[CONF_HOST], port=entry.data[CONF_PORT], session=session,)
|
||||
elgato = Elgato(
|
||||
entry.data[CONF_HOST],
|
||||
port=entry.data[CONF_PORT],
|
||||
session=session,
|
||||
)
|
||||
|
||||
# Ensure we can connect to it
|
||||
try:
|
||||
|
|
|
@ -132,5 +132,9 @@ class ElgatoFlowHandler(ConfigFlow, domain=DOMAIN):
|
|||
async def _get_elgato_info(self, host: str, port: int) -> Info:
|
||||
"""Get device information from an Elgato Key Light device."""
|
||||
session = async_get_clientsession(self.hass)
|
||||
elgato = Elgato(host, port=port, session=session,)
|
||||
elgato = Elgato(
|
||||
host,
|
||||
port=port,
|
||||
session=session,
|
||||
)
|
||||
return await elgato.info()
|
||||
|
|
|
@ -49,7 +49,10 @@ class ElgatoLight(LightEntity):
|
|||
"""Defines a Elgato Key Light."""
|
||||
|
||||
def __init__(
|
||||
self, entry_id: str, elgato: Elgato, info: Info,
|
||||
self,
|
||||
entry_id: str,
|
||||
elgato: Elgato,
|
||||
info: Info,
|
||||
):
|
||||
"""Initialize Elgato Key Light."""
|
||||
self._brightness: Optional[int] = None
|
||||
|
|
|
@ -173,7 +173,9 @@ async def async_setup(hass: HomeAssistant, hass_config: ConfigType) -> bool:
|
|||
|
||||
hass.async_create_task(
|
||||
hass.config_entries.flow.async_init(
|
||||
DOMAIN, context={"source": SOURCE_IMPORT}, data=conf,
|
||||
DOMAIN,
|
||||
context={"source": SOURCE_IMPORT},
|
||||
data=conf,
|
||||
)
|
||||
)
|
||||
|
||||
|
|
|
@ -57,7 +57,10 @@ class DescriptionXmlView(HomeAssistantView):
|
|||
|
||||
@core.callback
|
||||
def create_upnp_datagram_endpoint(
|
||||
host_ip_addr, upnp_bind_multicast, advertise_ip, advertise_port,
|
||||
host_ip_addr,
|
||||
upnp_bind_multicast,
|
||||
advertise_ip,
|
||||
advertise_port,
|
||||
):
|
||||
"""Create the UPNP socket and protocol."""
|
||||
|
||||
|
|
|
@ -146,8 +146,8 @@ class FibaroThermostat(FibaroDevice, ClimateEntity):
|
|||
self._unit_of_temp = TEMP_CELSIUS
|
||||
|
||||
if self._fan_mode_device:
|
||||
fan_modes = self._fan_mode_device.fibaro_device.properties.supportedModes.split(
|
||||
","
|
||||
fan_modes = (
|
||||
self._fan_mode_device.fibaro_device.properties.supportedModes.split(",")
|
||||
)
|
||||
for mode in fan_modes:
|
||||
mode = int(mode)
|
||||
|
|
|
@ -61,7 +61,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry):
|
|||
)
|
||||
)
|
||||
flume_devices = await hass.async_add_executor_job(
|
||||
partial(FlumeDeviceList, flume_auth, http_session=http_session,)
|
||||
partial(
|
||||
FlumeDeviceList,
|
||||
flume_auth,
|
||||
http_session=http_session,
|
||||
)
|
||||
)
|
||||
except RequestException:
|
||||
raise ConfigEntryNotReady
|
||||
|
|
|
@ -184,7 +184,9 @@ class FluNearYouData:
|
|||
# If this is the first registration we have, start a time interval:
|
||||
if not self._async_cancel_time_interval_listener:
|
||||
self._async_cancel_time_interval_listener = async_track_time_interval(
|
||||
self._hass, self._async_update_listener_action, DEFAULT_SCAN_INTERVAL,
|
||||
self._hass,
|
||||
self._async_update_listener_action,
|
||||
DEFAULT_SCAN_INTERVAL,
|
||||
)
|
||||
|
||||
api_category = async_get_api_category(sensor_type)
|
||||
|
|
|
@ -176,7 +176,8 @@ class ForkedDaapdFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
|
|||
if entry.data.get(CONF_HOST) != discovery_info["host"]:
|
||||
continue
|
||||
self.hass.config_entries.async_update_entry(
|
||||
entry, title=discovery_info["properties"]["Machine Name"],
|
||||
entry,
|
||||
title=discovery_info["properties"]["Machine Name"],
|
||||
)
|
||||
return self.async_abort(reason="already_configured")
|
||||
|
||||
|
|
|
@ -50,7 +50,9 @@ async def async_setup(hass, config):
|
|||
for freebox_conf in conf:
|
||||
hass.async_create_task(
|
||||
hass.config_entries.flow.async_init(
|
||||
DOMAIN, context={"source": SOURCE_IMPORT}, data=freebox_conf,
|
||||
DOMAIN,
|
||||
context={"source": SOURCE_IMPORT},
|
||||
data=freebox_conf,
|
||||
)
|
||||
)
|
||||
|
||||
|
|
|
@ -82,7 +82,8 @@ class FreeboxFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
|
|||
await fbx.close()
|
||||
|
||||
return self.async_create_entry(
|
||||
title=self._host, data={CONF_HOST: self._host, CONF_PORT: self._port},
|
||||
title=self._host,
|
||||
data={CONF_HOST: self._host, CONF_PORT: self._port},
|
||||
)
|
||||
|
||||
except AuthorizationError as error:
|
||||
|
|
|
@ -87,7 +87,8 @@ SET_ZONE_OVERRIDE_SCHEMA = vol.Schema(
|
|||
vol.Coerce(float), vol.Range(min=4, max=28)
|
||||
),
|
||||
vol.Optional(ATTR_DURATION): vol.All(
|
||||
cv.time_period, vol.Range(min=timedelta(minutes=5), max=timedelta(days=1)),
|
||||
cv.time_period,
|
||||
vol.Range(min=timedelta(minutes=5), max=timedelta(days=1)),
|
||||
),
|
||||
}
|
||||
)
|
||||
|
|
|
@ -44,7 +44,8 @@ class GiosFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
|
|||
await gios.update()
|
||||
|
||||
return self.async_create_entry(
|
||||
title=user_input[CONF_STATION_ID], data=user_input,
|
||||
title=user_input[CONF_STATION_ID],
|
||||
data=user_input,
|
||||
)
|
||||
except (ApiError, ClientConnectorError, asyncio.TimeoutError):
|
||||
errors["base"] = "cannot_connect"
|
||||
|
|
|
@ -157,7 +157,8 @@ class GlancesSensor(Entity):
|
|||
self._state = round(disk["free"] / 1024 ** 3, 1)
|
||||
except KeyError:
|
||||
self._state = round(
|
||||
(disk["size"] - disk["used"]) / 1024 ** 3, 1,
|
||||
(disk["size"] - disk["used"]) / 1024 ** 3,
|
||||
1,
|
||||
)
|
||||
elif self.type == "sensor_temp":
|
||||
for sensor in value["sensors"]:
|
||||
|
|
|
@ -208,7 +208,11 @@ class AbstractConfig(ABC):
|
|||
return
|
||||
|
||||
webhook.async_register(
|
||||
self.hass, DOMAIN, "Local Support", webhook_id, self._handle_local_webhook,
|
||||
self.hass,
|
||||
DOMAIN,
|
||||
"Local Support",
|
||||
webhook_id,
|
||||
self._handle_local_webhook,
|
||||
)
|
||||
|
||||
self._local_sdk_active = True
|
||||
|
|
|
@ -103,7 +103,10 @@ class CoverGroup(GroupEntity, CoverEntity):
|
|||
)
|
||||
|
||||
async def async_update_supported_features(
|
||||
self, entity_id: str, new_state: Optional[State], update_state: bool = True,
|
||||
self,
|
||||
entity_id: str,
|
||||
new_state: Optional[State],
|
||||
update_state: bool = True,
|
||||
) -> None:
|
||||
"""Update dictionaries with supported features."""
|
||||
if not new_state:
|
||||
|
|
|
@ -156,7 +156,9 @@ class GuardianSwitch(GuardianEntity, SwitchEntity):
|
|||
try:
|
||||
async with self._client:
|
||||
await self._client.system.upgrade_firmware(
|
||||
url=url, port=port, filename=filename,
|
||||
url=url,
|
||||
port=port,
|
||||
filename=filename,
|
||||
)
|
||||
except GuardianError as err:
|
||||
LOGGER.error("Error during service call: %s", err)
|
||||
|
|
|
@ -122,7 +122,9 @@ async def async_setup_entry(
|
|||
platform = entity_platform.current_platform.get()
|
||||
|
||||
platform.async_register_entity_service(
|
||||
SERVICE_SYNC, HARMONY_SYNC_SCHEMA, "sync",
|
||||
SERVICE_SYNC,
|
||||
HARMONY_SYNC_SCHEMA,
|
||||
"sync",
|
||||
)
|
||||
platform.async_register_entity_service(
|
||||
SERVICE_CHANGE_CHANNEL, HARMONY_CHANGE_CHANNEL_SCHEMA, "change_channel"
|
||||
|
|
|
@ -60,7 +60,8 @@ async def async_setup(hass, config):
|
|||
hass.data[DOMAIN] = conf
|
||||
hass.async_create_task(
|
||||
hass.config_entries.flow.async_init(
|
||||
DOMAIN, context={"source": config_entries.SOURCE_IMPORT},
|
||||
DOMAIN,
|
||||
context={"source": config_entries.SOURCE_IMPORT},
|
||||
)
|
||||
)
|
||||
|
||||
|
|
|
@ -297,7 +297,8 @@ def _get_states_with_session(
|
|||
most_recent_state_ids = most_recent_state_ids.subquery()
|
||||
|
||||
query = query.join(
|
||||
most_recent_state_ids, States.state_id == most_recent_state_ids.c.max_state_id,
|
||||
most_recent_state_ids,
|
||||
States.state_id == most_recent_state_ids.c.max_state_id,
|
||||
).filter(~States.domain.in_(IGNORE_DOMAINS))
|
||||
|
||||
if filters:
|
||||
|
|
|
@ -59,8 +59,10 @@ async def async_setup(hass: HomeAssistant, config: dict) -> bool:
|
|||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
"""Set up Home Connect from a config entry."""
|
||||
implementation = await config_entry_oauth2_flow.async_get_config_entry_implementation(
|
||||
hass, entry
|
||||
implementation = (
|
||||
await config_entry_oauth2_flow.async_get_config_entry_implementation(
|
||||
hass, entry
|
||||
)
|
||||
)
|
||||
|
||||
hc_api = api.ConfigEntryAuth(hass, entry, implementation)
|
||||
|
|
|
@ -103,7 +103,9 @@ class HomeConnectPowerSwitch(HomeConnectEntity, SwitchEntity):
|
|||
_LOGGER.debug("Tried to switch on %s", self.name)
|
||||
try:
|
||||
await self.hass.async_add_executor_job(
|
||||
self.device.appliance.set_setting, BSH_POWER_STATE, BSH_POWER_ON,
|
||||
self.device.appliance.set_setting,
|
||||
BSH_POWER_STATE,
|
||||
BSH_POWER_ON,
|
||||
)
|
||||
except HomeConnectError as err:
|
||||
_LOGGER.error("Error while trying to turn on device: %s", err)
|
||||
|
|
|
@ -143,7 +143,11 @@ async def async_attach_trigger(
|
|||
return cur_value == new_value
|
||||
|
||||
unsub_track_same[entity] = async_track_same_state(
|
||||
hass, period[entity], call_action, _check_same_state, entity_ids=entity,
|
||||
hass,
|
||||
period[entity],
|
||||
call_action,
|
||||
_check_same_state,
|
||||
entity_ids=entity,
|
||||
)
|
||||
|
||||
unsub = async_track_state_change_event(hass, entity_id, state_automation_listener)
|
||||
|
|
|
@ -172,7 +172,9 @@ async def async_setup(hass: HomeAssistant, config: dict):
|
|||
conf[CONF_ENTRY_INDEX] = index
|
||||
hass.async_create_task(
|
||||
hass.config_entries.flow.async_init(
|
||||
DOMAIN, context={"source": SOURCE_IMPORT}, data=conf,
|
||||
DOMAIN,
|
||||
context={"source": SOURCE_IMPORT},
|
||||
data=conf,
|
||||
)
|
||||
)
|
||||
|
||||
|
@ -632,14 +634,16 @@ class HomeKit:
|
|||
)
|
||||
if motion_binary_sensor_entity_id:
|
||||
self._config.setdefault(state.entity_id, {}).setdefault(
|
||||
CONF_LINKED_MOTION_SENSOR, motion_binary_sensor_entity_id,
|
||||
CONF_LINKED_MOTION_SENSOR,
|
||||
motion_binary_sensor_entity_id,
|
||||
)
|
||||
doorbell_binary_sensor_entity_id = device_lookup[ent_reg_ent.device_id].get(
|
||||
(BINARY_SENSOR_DOMAIN, DEVICE_CLASS_OCCUPANCY)
|
||||
)
|
||||
if doorbell_binary_sensor_entity_id:
|
||||
self._config.setdefault(state.entity_id, {}).setdefault(
|
||||
CONF_LINKED_DOORBELL_SENSOR, doorbell_binary_sensor_entity_id,
|
||||
CONF_LINKED_DOORBELL_SENSOR,
|
||||
doorbell_binary_sensor_entity_id,
|
||||
)
|
||||
|
||||
if state.entity_id.startswith(f"{HUMIDIFIER_DOMAIN}."):
|
||||
|
@ -648,7 +652,8 @@ class HomeKit:
|
|||
].get((SENSOR_DOMAIN, DEVICE_CLASS_HUMIDITY))
|
||||
if current_humidity_sensor_entity_id:
|
||||
self._config.setdefault(state.entity_id, {}).setdefault(
|
||||
CONF_LINKED_HUMIDITY_SENSOR, current_humidity_sensor_entity_id,
|
||||
CONF_LINKED_HUMIDITY_SENSOR,
|
||||
current_humidity_sensor_entity_id,
|
||||
)
|
||||
|
||||
async def _async_set_device_info_attributes(self, ent_reg_ent, dev_reg, entity_id):
|
||||
|
|
|
@ -261,7 +261,8 @@ class OptionsFlowHandler(config_entries.OptionsFlow):
|
|||
data_schema = vol.Schema(
|
||||
{
|
||||
vol.Optional(
|
||||
CONF_CAMERA_COPY, default=cameras_with_copy,
|
||||
CONF_CAMERA_COPY,
|
||||
default=cameras_with_copy,
|
||||
): cv.multi_select(self.included_cameras),
|
||||
}
|
||||
)
|
||||
|
|
|
@ -27,7 +27,9 @@ def scale_jpeg_camera_image(cam_image, width, height):
|
|||
break
|
||||
|
||||
return turbo_jpeg.scale_with_quality(
|
||||
cam_image.content, scaling_factor=scaling_factor, quality=75,
|
||||
cam_image.content,
|
||||
scaling_factor=scaling_factor,
|
||||
quality=75,
|
||||
)
|
||||
|
||||
|
||||
|
|
|
@ -220,15 +220,18 @@ class Camera(HomeAccessory, PyhapCamera):
|
|||
serv_doorbell = self.add_preload_service(SERV_DOORBELL)
|
||||
self.set_primary_service(serv_doorbell)
|
||||
self._char_doorbell_detected = serv_doorbell.configure_char(
|
||||
CHAR_PROGRAMMABLE_SWITCH_EVENT, value=0,
|
||||
CHAR_PROGRAMMABLE_SWITCH_EVENT,
|
||||
value=0,
|
||||
)
|
||||
serv_stateless_switch = self.add_preload_service(
|
||||
SERV_STATELESS_PROGRAMMABLE_SWITCH
|
||||
)
|
||||
self._char_doorbell_detected_switch = serv_stateless_switch.configure_char(
|
||||
CHAR_PROGRAMMABLE_SWITCH_EVENT,
|
||||
value=0,
|
||||
valid_values={"SinglePress": DOORBELL_SINGLE_PRESS},
|
||||
self._char_doorbell_detected_switch = (
|
||||
serv_stateless_switch.configure_char(
|
||||
CHAR_PROGRAMMABLE_SWITCH_EVENT,
|
||||
value=0,
|
||||
valid_values={"SinglePress": DOORBELL_SINGLE_PRESS},
|
||||
)
|
||||
)
|
||||
serv_speaker = self.add_preload_service(SERV_SPEAKER)
|
||||
serv_speaker.configure_char(CHAR_MUTE, value=0)
|
||||
|
@ -387,7 +390,9 @@ class Camera(HomeAccessory, PyhapCamera):
|
|||
await self._async_ffmpeg_watch(session_info["id"])
|
||||
|
||||
session_info[FFMPEG_WATCHER] = async_track_time_interval(
|
||||
self.hass, watch_session, FFMPEG_WATCH_INTERVAL,
|
||||
self.hass,
|
||||
watch_session,
|
||||
FFMPEG_WATCH_INTERVAL,
|
||||
)
|
||||
|
||||
return await self._async_ffmpeg_watch(session_info["id"])
|
||||
|
|
|
@ -88,17 +88,21 @@ class HumidifierDehumidifier(HomeAccessory):
|
|||
)
|
||||
|
||||
# Current and target mode characteristics
|
||||
self.char_current_humidifier_dehumidifier = serv_humidifier_dehumidifier.configure_char(
|
||||
CHAR_CURRENT_HUMIDIFIER_DEHUMIDIFIER, value=0
|
||||
self.char_current_humidifier_dehumidifier = (
|
||||
serv_humidifier_dehumidifier.configure_char(
|
||||
CHAR_CURRENT_HUMIDIFIER_DEHUMIDIFIER, value=0
|
||||
)
|
||||
)
|
||||
self.char_target_humidifier_dehumidifier = serv_humidifier_dehumidifier.configure_char(
|
||||
CHAR_TARGET_HUMIDIFIER_DEHUMIDIFIER,
|
||||
value=self._hk_device_class,
|
||||
valid_values={
|
||||
HC_HASS_TO_HOMEKIT_DEVICE_CLASS_NAME[
|
||||
device_class
|
||||
]: self._hk_device_class
|
||||
},
|
||||
self.char_target_humidifier_dehumidifier = (
|
||||
serv_humidifier_dehumidifier.configure_char(
|
||||
CHAR_TARGET_HUMIDIFIER_DEHUMIDIFIER,
|
||||
value=self._hk_device_class,
|
||||
valid_values={
|
||||
HC_HASS_TO_HOMEKIT_DEVICE_CLASS_NAME[
|
||||
device_class
|
||||
]: self._hk_device_class
|
||||
},
|
||||
)
|
||||
)
|
||||
|
||||
# Current and target humidity characteristics
|
||||
|
|
|
@ -231,7 +231,9 @@ class MediaPlayer(HomeAccessory):
|
|||
if self.chars[FEATURE_PLAY_STOP]:
|
||||
hk_state = current_state == STATE_PLAYING
|
||||
_LOGGER.debug(
|
||||
'%s: Set current state for "play_stop" to %s', self.entity_id, hk_state,
|
||||
'%s: Set current state for "play_stop" to %s',
|
||||
self.entity_id,
|
||||
hk_state,
|
||||
)
|
||||
if self.chars[FEATURE_PLAY_STOP].value != hk_state:
|
||||
self.chars[FEATURE_PLAY_STOP].set_value(hk_state)
|
||||
|
@ -414,7 +416,9 @@ class TelevisionMediaPlayer(HomeAccessory):
|
|||
if CHAR_VOLUME_SELECTOR in self.chars_speaker:
|
||||
current_mute_state = bool(new_state.attributes.get(ATTR_MEDIA_VOLUME_MUTED))
|
||||
_LOGGER.debug(
|
||||
"%s: Set current mute state to %s", self.entity_id, current_mute_state,
|
||||
"%s: Set current mute state to %s",
|
||||
self.entity_id,
|
||||
current_mute_state,
|
||||
)
|
||||
if self.char_mute.value != current_mute_state:
|
||||
self.char_mute.set_value(current_mute_state)
|
||||
|
@ -429,7 +433,8 @@ class TelevisionMediaPlayer(HomeAccessory):
|
|||
self.char_input_source.set_value(index)
|
||||
elif hk_state:
|
||||
_LOGGER.warning(
|
||||
"%s: Sources out of sync. Restart Home Assistant", self.entity_id,
|
||||
"%s: Sources out of sync. Restart Home Assistant",
|
||||
self.entity_id,
|
||||
)
|
||||
if self.char_input_source.value != 0:
|
||||
self.char_input_source.set_value(0)
|
||||
|
|
|
@ -325,7 +325,10 @@ class Thermostat(HomeAccessory):
|
|||
if service:
|
||||
params[ATTR_ENTITY_ID] = self.entity_id
|
||||
self.call_service(
|
||||
DOMAIN_CLIMATE, service, params, ", ".join(events),
|
||||
DOMAIN_CLIMATE,
|
||||
service,
|
||||
params,
|
||||
", ".join(events),
|
||||
)
|
||||
|
||||
if CHAR_TARGET_HUMIDITY in char_values:
|
||||
|
|
|
@ -287,7 +287,8 @@ class HomeKitHeaterCoolerEntity(HomeKitEntity, ClimateEntity):
|
|||
Requires SUPPORT_SWING_MODE.
|
||||
"""
|
||||
valid_values = clamp_enum_to_char(
|
||||
SwingModeValues, self.service[CharacteristicsTypes.SWING_MODE],
|
||||
SwingModeValues,
|
||||
self.service[CharacteristicsTypes.SWING_MODE],
|
||||
)
|
||||
return [SWING_MODE_HOMEKIT_TO_HASS[mode] for mode in valid_values]
|
||||
|
||||
|
|
|
@ -212,7 +212,8 @@ class HKDevice:
|
|||
)
|
||||
|
||||
device = device_registry.async_get_or_create(
|
||||
config_entry_id=self.config_entry.entry_id, **device_info,
|
||||
config_entry_id=self.config_entry.entry_id,
|
||||
**device_info,
|
||||
)
|
||||
|
||||
devices[accessory.aid] = device.id
|
||||
|
|
|
@ -129,7 +129,8 @@ class HomeKitTelevision(HomeKitEntity, MediaPlayerEntity):
|
|||
this_tv = this_accessory.services.iid(self._iid)
|
||||
|
||||
input_sources = this_accessory.services.filter(
|
||||
service_type=ServicesTypes.INPUT_SOURCE, parent_service=this_tv,
|
||||
service_type=ServicesTypes.INPUT_SOURCE,
|
||||
parent_service=this_tv,
|
||||
)
|
||||
|
||||
for input_source in input_sources:
|
||||
|
|
|
@ -130,7 +130,12 @@ def setup_platform(hass, config, add_entities, discovery_info=None):
|
|||
add_entities(
|
||||
[
|
||||
HoneywellUSThermostat(
|
||||
client, device, cool_away_temp, heat_away_temp, username, password,
|
||||
client,
|
||||
device,
|
||||
cool_away_temp,
|
||||
heat_away_temp,
|
||||
username,
|
||||
password,
|
||||
)
|
||||
for location in client.locations_by_id.values()
|
||||
for device in location.devices_by_id.values()
|
||||
|
|
|
@ -124,8 +124,9 @@ async def process_wrong_login(request):
|
|||
request.app[KEY_FAILED_LOGIN_ATTEMPTS][remote_addr] += 1
|
||||
|
||||
# Supervisor IP should never be banned
|
||||
if "hassio" in hass.config.components and hass.components.hassio.get_supervisor_ip() == str(
|
||||
remote_addr
|
||||
if (
|
||||
"hassio" in hass.config.components
|
||||
and hass.components.hassio.get_supervisor_ip() == str(remote_addr)
|
||||
):
|
||||
return
|
||||
|
||||
|
|
|
@ -43,7 +43,9 @@ class HomeAssistantView:
|
|||
|
||||
@staticmethod
|
||||
def json(
|
||||
result: Any, status_code: int = HTTP_OK, headers: Optional[LooseHeaders] = None,
|
||||
result: Any,
|
||||
status_code: int = HTTP_OK,
|
||||
headers: Optional[LooseHeaders] = None,
|
||||
) -> web.Response:
|
||||
"""Return a JSON response."""
|
||||
try:
|
||||
|
@ -114,7 +116,10 @@ def request_handler_factory(view: HomeAssistantView, handler: Callable) -> Calla
|
|||
raise HTTPUnauthorized()
|
||||
|
||||
_LOGGER.debug(
|
||||
"Serving %s to %s (auth: %s)", request.path, request.remote, authenticated,
|
||||
"Serving %s to %s (auth: %s)",
|
||||
request.path,
|
||||
request.remote,
|
||||
authenticated,
|
||||
)
|
||||
|
||||
try:
|
||||
|
|
|
@ -195,7 +195,8 @@ class Router:
|
|||
_LOGGER.debug("Trying to authorize again...")
|
||||
if self.connection.enforce_authorized_connection():
|
||||
_LOGGER.debug(
|
||||
"...success, %s will be updated by a future periodic run", key,
|
||||
"...success, %s will be updated by a future periodic run",
|
||||
key,
|
||||
)
|
||||
else:
|
||||
_LOGGER.debug("...failed")
|
||||
|
@ -523,7 +524,10 @@ async def async_setup(hass: HomeAssistantType, config) -> bool:
|
|||
|
||||
for service in ADMIN_SERVICES:
|
||||
hass.helpers.service.async_register_admin_service(
|
||||
DOMAIN, service, service_handler, schema=SERVICE_SCHEMA,
|
||||
DOMAIN,
|
||||
service,
|
||||
service_handler,
|
||||
schema=SERVICE_SCHEMA,
|
||||
)
|
||||
|
||||
for url, router_config in domain_config.items():
|
||||
|
|
|
@ -178,8 +178,12 @@ SENSOR_META = {
|
|||
name="Operator search mode",
|
||||
formatter=lambda x: ({"0": "Auto", "1": "Manual"}.get(x, "Unknown"), None),
|
||||
),
|
||||
(KEY_NET_CURRENT_PLMN, "FullName"): dict(name="Operator name",),
|
||||
(KEY_NET_CURRENT_PLMN, "Numeric"): dict(name="Operator code",),
|
||||
(KEY_NET_CURRENT_PLMN, "FullName"): dict(
|
||||
name="Operator name",
|
||||
),
|
||||
(KEY_NET_CURRENT_PLMN, "Numeric"): dict(
|
||||
name="Operator code",
|
||||
),
|
||||
KEY_NET_NET_MODE: dict(include=re.compile(r"^NetworkMode$", re.IGNORECASE)),
|
||||
(KEY_NET_NET_MODE, "NetworkMode"): dict(
|
||||
name="Preferred mode",
|
||||
|
@ -197,7 +201,8 @@ SENSOR_META = {
|
|||
),
|
||||
),
|
||||
(KEY_SMS_SMS_COUNT, "LocalUnread"): dict(
|
||||
name="SMS unread", icon="mdi:email-receive",
|
||||
name="SMS unread",
|
||||
icon="mdi:email-receive",
|
||||
),
|
||||
}
|
||||
|
||||
|
|
|
@ -44,7 +44,8 @@ CONFIG_SCHEMA = vol.Schema(
|
|||
DOMAIN: vol.Schema(
|
||||
{
|
||||
vol.Optional(CONF_BRIDGES): vol.All(
|
||||
cv.ensure_list, [BRIDGE_CONFIG_SCHEMA],
|
||||
cv.ensure_list,
|
||||
[BRIDGE_CONFIG_SCHEMA],
|
||||
)
|
||||
}
|
||||
)
|
||||
|
@ -149,7 +150,8 @@ async def async_setup_entry(
|
|||
|
||||
if options:
|
||||
hass.config_entries.async_update_entry(
|
||||
entry, options={**entry.options, **options},
|
||||
entry,
|
||||
options={**entry.options, **options},
|
||||
)
|
||||
|
||||
bridge = HueBridge(hass, entry)
|
||||
|
|
|
@ -150,7 +150,9 @@ class SensorManager:
|
|||
|
||||
self.bridge.hass.async_create_task(
|
||||
remove_devices(
|
||||
self.bridge, [value.uniqueid for value in api.values()], current,
|
||||
self.bridge,
|
||||
[value.uniqueid for value in api.values()],
|
||||
current,
|
||||
)
|
||||
)
|
||||
|
||||
|
|
|
@ -41,7 +41,11 @@ async def async_setup(hass: HomeAssistant, config: dict):
|
|||
hass.data[DOMAIN] = storage_collection = ImageStorageCollection(hass, image_dir)
|
||||
await storage_collection.async_load()
|
||||
collection.StorageCollectionWebsocket(
|
||||
storage_collection, DOMAIN, DOMAIN, CREATE_FIELDS, UPDATE_FIELDS,
|
||||
storage_collection,
|
||||
DOMAIN,
|
||||
DOMAIN,
|
||||
CREATE_FIELDS,
|
||||
UPDATE_FIELDS,
|
||||
).async_setup(hass, create_create=False)
|
||||
|
||||
hass.http.register_view(ImageUploadView)
|
||||
|
@ -94,7 +98,7 @@ class ImageStorageCollection(collection.StorageCollection):
|
|||
# Reset content
|
||||
uploaded_file.file.seek(0)
|
||||
|
||||
media_folder: pathlib.Path = (self.image_dir / data[CONF_ID])
|
||||
media_folder: pathlib.Path = self.image_dir / data[CONF_ID]
|
||||
media_folder.mkdir(parents=True)
|
||||
|
||||
media_file = media_folder / "original"
|
||||
|
|
|
@ -179,7 +179,10 @@ INFLUX_SCHEMA = vol.All(
|
|||
create_influx_url,
|
||||
)
|
||||
|
||||
CONFIG_SCHEMA = vol.Schema({DOMAIN: INFLUX_SCHEMA}, extra=vol.ALLOW_EXTRA,)
|
||||
CONFIG_SCHEMA = vol.Schema(
|
||||
{DOMAIN: INFLUX_SCHEMA},
|
||||
extra=vol.ALLOW_EXTRA,
|
||||
)
|
||||
|
||||
|
||||
def _generate_event_to_json(conf: Dict) -> Callable[[Dict], str]:
|
||||
|
|
|
@ -129,7 +129,8 @@ RENDERING_WHERE_ERROR_MESSAGE = "Could not render where template: %s."
|
|||
COMPONENT_CONFIG_SCHEMA_CONNECTION = {
|
||||
# Connection config for V1 and V2 APIs.
|
||||
vol.Optional(CONF_API_VERSION, default=DEFAULT_API_VERSION): vol.All(
|
||||
vol.Coerce(str), vol.In([DEFAULT_API_VERSION, API_VERSION_2]),
|
||||
vol.Coerce(str),
|
||||
vol.In([DEFAULT_API_VERSION, API_VERSION_2]),
|
||||
),
|
||||
vol.Optional(CONF_HOST): cv.string,
|
||||
vol.Optional(CONF_PATH): cv.string,
|
||||
|
|
|
@ -109,7 +109,8 @@ async def async_import_config(hass, conf):
|
|||
if result["type"] == RESULT_TYPE_CREATE_ENTRY and options:
|
||||
entry = result["result"]
|
||||
hass.config_entries.async_update_entry(
|
||||
entry=entry, options=options,
|
||||
entry=entry,
|
||||
options=options,
|
||||
)
|
||||
return result
|
||||
|
||||
|
|
|
@ -248,7 +248,8 @@ class InsteonOptionsFlowHandler(config_entries.OptionsFlow):
|
|||
data[CONF_PASSWORD] = user_input[CONF_PASSWORD]
|
||||
self.hass.config_entries.async_update_entry(self.config_entry, data=data)
|
||||
return self.async_create_entry(
|
||||
title="", data={**self.config_entry.options},
|
||||
title="",
|
||||
data={**self.config_entry.options},
|
||||
)
|
||||
data_schema = build_hub_schema(**self.config_entry.data)
|
||||
return self.async_show_form(
|
||||
|
@ -291,7 +292,9 @@ class InsteonOptionsFlowHandler(config_entries.OptionsFlow):
|
|||
if user_input is not None:
|
||||
options = _remove_override(user_input[CONF_ADDRESS], options)
|
||||
async_dispatcher_send(
|
||||
self.hass, SIGNAL_REMOVE_DEVICE_OVERRIDE, user_input[CONF_ADDRESS],
|
||||
self.hass,
|
||||
SIGNAL_REMOVE_DEVICE_OVERRIDE,
|
||||
user_input[CONF_ADDRESS],
|
||||
)
|
||||
return self.async_create_entry(title="", data=options)
|
||||
|
||||
|
|
|
@ -94,7 +94,10 @@ class InsteonEntity(Entity):
|
|||
def async_entity_update(self, name, address, value, group):
|
||||
"""Receive notification from transport that new data exists."""
|
||||
_LOGGER.debug(
|
||||
"Received update for device %s group %d value %s", address, group, value,
|
||||
"Received update for device %s group %d value %s",
|
||||
address,
|
||||
group,
|
||||
value,
|
||||
)
|
||||
self.async_write_ha_state()
|
||||
|
||||
|
|
|
@ -110,7 +110,10 @@ class IPPDataUpdateCoordinator(DataUpdateCoordinator[IPPPrinter]):
|
|||
)
|
||||
|
||||
super().__init__(
|
||||
hass, _LOGGER, name=DOMAIN, update_interval=SCAN_INTERVAL,
|
||||
hass,
|
||||
_LOGGER,
|
||||
name=DOMAIN,
|
||||
update_interval=SCAN_INTERVAL,
|
||||
)
|
||||
|
||||
async def _async_update_data(self) -> IPPPrinter:
|
||||
|
|
|
@ -177,7 +177,8 @@ class IPPFlowHandler(ConfigFlow, domain=DOMAIN):
|
|||
)
|
||||
|
||||
return self.async_create_entry(
|
||||
title=self.discovery_info[CONF_NAME], data=self.discovery_info,
|
||||
title=self.discovery_info[CONF_NAME],
|
||||
data=self.discovery_info,
|
||||
)
|
||||
|
||||
def _show_setup_form(self, errors: Optional[Dict] = None) -> Dict[str, Any]:
|
||||
|
|
|
@ -180,7 +180,9 @@ class IQVIAData:
|
|||
# If this is the first registration we have, start a time interval:
|
||||
if not self._async_cancel_time_interval_listener:
|
||||
self._async_cancel_time_interval_listener = async_track_time_interval(
|
||||
self._hass, self._async_update_listener_action, DEFAULT_SCAN_INTERVAL,
|
||||
self._hass,
|
||||
self._async_update_listener_action,
|
||||
DEFAULT_SCAN_INTERVAL,
|
||||
)
|
||||
|
||||
api_category = async_get_api_category(sensor_type)
|
||||
|
|
|
@ -400,7 +400,10 @@ class KNXExposeSensor:
|
|||
else:
|
||||
_name = self.entity_id
|
||||
self.device = ExposeSensor(
|
||||
self.xknx, name=_name, group_address=self.address, value_type=self.type,
|
||||
self.xknx,
|
||||
name=_name,
|
||||
group_address=self.address,
|
||||
value_type=self.type,
|
||||
)
|
||||
self.xknx.devices.add(self.device)
|
||||
async_track_state_change_event(
|
||||
|
|
|
@ -218,7 +218,9 @@ def _create_sensor(knx_module: XKNX, config: ConfigType) -> XknxSensor:
|
|||
def _create_notify(knx_module: XKNX, config: ConfigType) -> XknxNotification:
|
||||
"""Return a KNX notification to be used within XKNX."""
|
||||
return XknxNotification(
|
||||
knx_module, name=config[CONF_NAME], group_address=config[CONF_ADDRESS],
|
||||
knx_module,
|
||||
name=config[CONF_NAME],
|
||||
group_address=config[CONF_ADDRESS],
|
||||
)
|
||||
|
||||
|
||||
|
|
|
@ -57,7 +57,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry):
|
|||
raise ConfigEntryNotReady from error
|
||||
except InvalidAuthError as error:
|
||||
_LOGGER.error(
|
||||
"Login to %s failed: [%s]", entry.data[CONF_HOST], error,
|
||||
"Login to %s failed: [%s]",
|
||||
entry.data[CONF_HOST],
|
||||
error,
|
||||
)
|
||||
return False
|
||||
|
||||
|
|
|
@ -284,7 +284,8 @@ class KodiConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
|
|||
@callback
|
||||
def _create_entry(self):
|
||||
return self.async_create_entry(
|
||||
title=self._name or self._host, data=self._get_data(),
|
||||
title=self._name or self._host,
|
||||
data=self._get_data(),
|
||||
)
|
||||
|
||||
@callback
|
||||
|
|
|
@ -349,7 +349,8 @@ class KonnectedFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
|
|||
) or "".join(random.choices(f"{string.ascii_uppercase}{string.digits}", k=20))
|
||||
|
||||
return self.async_create_entry(
|
||||
title=KONN_PANEL_MODEL_NAMES[self.data[CONF_MODEL]], data=self.data,
|
||||
title=KONN_PANEL_MODEL_NAMES[self.data[CONF_MODEL]],
|
||||
data=self.data,
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
|
|
|
@ -168,12 +168,20 @@ class AlarmPanel:
|
|||
if self.client:
|
||||
if self.api_version == CONF_ZONE:
|
||||
return await self.client.put_zone(
|
||||
zone, state, momentary, times, pause,
|
||||
zone,
|
||||
state,
|
||||
momentary,
|
||||
times,
|
||||
pause,
|
||||
)
|
||||
|
||||
# device endpoint uses pin number instead of zone
|
||||
return await self.client.put_device(
|
||||
ZONE_TO_PIN[zone], state, momentary, times, pause,
|
||||
ZONE_TO_PIN[zone],
|
||||
state,
|
||||
momentary,
|
||||
times,
|
||||
pause,
|
||||
)
|
||||
|
||||
except self.client.ClientError as err:
|
||||
|
@ -208,7 +216,8 @@ class AlarmPanel:
|
|||
act = {
|
||||
CONF_ZONE: zone,
|
||||
CONF_NAME: entity.get(
|
||||
CONF_NAME, f"Konnected {self.device_id[6:]} Actuator {zone}",
|
||||
CONF_NAME,
|
||||
f"Konnected {self.device_id[6:]} Actuator {zone}",
|
||||
),
|
||||
ATTR_STATE: None,
|
||||
CONF_ACTIVATION: entity[CONF_ACTIVATION],
|
||||
|
|
|
@ -469,5 +469,6 @@ class Light(LightEntity):
|
|||
"""Print deprecation warning."""
|
||||
super().__init_subclass__(**kwargs)
|
||||
_LOGGER.warning(
|
||||
"Light is deprecated, modify %s to extend LightEntity", cls.__name__,
|
||||
"Light is deprecated, modify %s to extend LightEntity",
|
||||
cls.__name__,
|
||||
)
|
||||
|
|
|
@ -145,5 +145,6 @@ class LockDevice(LockEntity):
|
|||
"""Print deprecation warning."""
|
||||
super().__init_subclass__(**kwargs)
|
||||
_LOGGER.warning(
|
||||
"LockDevice is deprecated, modify %s to extend LockEntity", cls.__name__,
|
||||
"LockDevice is deprecated, modify %s to extend LockEntity",
|
||||
cls.__name__,
|
||||
)
|
||||
|
|
|
@ -58,7 +58,8 @@ CONFIG_SCHEMA = vol.Schema(
|
|||
vol.Lower, vol.In([MODE_YAML, MODE_STORAGE])
|
||||
),
|
||||
vol.Optional(CONF_DASHBOARDS): cv.schema_with_slug_keys(
|
||||
YAML_DASHBOARD_SCHEMA, slug_validator=url_slug,
|
||||
YAML_DASHBOARD_SCHEMA,
|
||||
slug_validator=url_slug,
|
||||
),
|
||||
vol.Optional(CONF_RESOURCES): [RESOURCE_SCHEMA],
|
||||
}
|
||||
|
|
|
@ -33,7 +33,8 @@ SCAN_INTERVAL = timedelta(minutes=15)
|
|||
CITY_SCHEMA = vol.Schema({vol.Required(CONF_CITY): cv.string})
|
||||
|
||||
CONFIG_SCHEMA = vol.Schema(
|
||||
{DOMAIN: vol.Schema(vol.All(cv.ensure_list, [CITY_SCHEMA]))}, extra=vol.ALLOW_EXTRA,
|
||||
{DOMAIN: vol.Schema(vol.All(cv.ensure_list, [CITY_SCHEMA]))},
|
||||
extra=vol.ALLOW_EXTRA,
|
||||
)
|
||||
|
||||
|
||||
|
@ -130,7 +131,9 @@ async def async_setup_entry(hass: HomeAssistantType, entry: ConfigEntry) -> bool
|
|||
|
||||
department = coordinator_forecast.data.position.get("dept")
|
||||
_LOGGER.debug(
|
||||
"Department corresponding to %s is %s", entry.title, department,
|
||||
"Department corresponding to %s is %s",
|
||||
entry.title,
|
||||
department,
|
||||
)
|
||||
if is_valid_warning_department(department):
|
||||
if not hass.data[DOMAIN].get(department):
|
||||
|
|
|
@ -74,7 +74,8 @@ class MeteoFranceFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
|
|||
self._abort_if_unique_id_configured()
|
||||
|
||||
return self.async_create_entry(
|
||||
title=city, data={CONF_LATITUDE: latitude, CONF_LONGITUDE: longitude},
|
||||
title=city,
|
||||
data={CONF_LATITUDE: latitude, CONF_LONGITUDE: longitude},
|
||||
)
|
||||
|
||||
async def async_step_import(self, user_input):
|
||||
|
|