Log lines do not end with a full stop (#37527)
parent 01fd33f173
commit 53545c984b
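The convention applied throughout this commit: messages passed to `_LOGGER` should not end with a full stop. A minimal illustrative sketch of the change pattern (the message below is an example, not taken from the diff):

```python
import logging

_LOGGER = logging.getLogger(__name__)

# Before: log message ends with a full stop.
_LOGGER.warning("Unable to connect to the example hub.")

# After: same message without the trailing full stop.
_LOGGER.warning("Unable to connect to the example hub")
```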
@@ -75,7 +75,7 @@ class CommandLineAuthProvider(AuthProvider):
 if process.returncode != 0:
     _LOGGER.error(
-        "User %r failed to authenticate, command exited with code %d.",
+        "User %r failed to authenticate, command exited with code %d",
         username,
         process.returncode,
     )

@@ -37,7 +37,7 @@ def is_on(hass, entity_id=None):
 continue

 if not hasattr(component, "is_on"):
-    _LOGGER.warning("Integration %s has no is_on method.", domain)
+    _LOGGER.warning("Integration %s has no is_on method", domain)
     continue

 if component.is_on(ent_id):

@@ -183,7 +183,7 @@ class AdGuardHomeEntity(Entity):
 except AdGuardHomeError:
     if self._available:
         _LOGGER.debug(
-            "An error occurred while updating AdGuard Home sensor.",
+            "An error occurred while updating AdGuard Home sensor",
             exc_info=True,
         )
         self._available = False

@@ -73,7 +73,7 @@ class AdGuardHomeSwitch(AdGuardHomeDeviceEntity, SwitchEntity):
 try:
     await self._adguard_turn_off()
 except AdGuardHomeError:
-    _LOGGER.error("An error occurred while turning off AdGuard Home switch.")
+    _LOGGER.error("An error occurred while turning off AdGuard Home switch")
     self._available = False

 async def _adguard_turn_off(self) -> None:

@@ -85,7 +85,7 @@ class AdGuardHomeSwitch(AdGuardHomeDeviceEntity, SwitchEntity):
 try:
     await self._adguard_turn_on()
 except AdGuardHomeError:
-    _LOGGER.error("An error occurred while turning on AdGuard Home switch.")
+    _LOGGER.error("An error occurred while turning on AdGuard Home switch")
     self._available = False

 async def _adguard_turn_on(self) -> None:

@@ -162,7 +162,7 @@ def setup(hass, config):
 if not restart:
     return
 restart = False
-_LOGGER.warning("AlarmDecoder unexpectedly lost connection.")
+_LOGGER.warning("AlarmDecoder unexpectedly lost connection")
 hass.add_job(open_connection)

 def handle_message(sender, message):

@@ -70,11 +70,11 @@ class Auth:
 await self.async_load_preferences()

 if self.is_token_valid():
-    _LOGGER.debug("Token still valid, using it.")
+    _LOGGER.debug("Token still valid, using it")
     return self._prefs[STORAGE_ACCESS_TOKEN]

 if self._prefs[STORAGE_REFRESH_TOKEN] is None:
-    _LOGGER.debug("Token invalid and no refresh token available.")
+    _LOGGER.debug("Token invalid and no refresh token available")
     return None

 lwa_params = {

@@ -84,7 +84,7 @@ class Auth:
     CONF_CLIENT_SECRET: self.client_secret,
 }

-_LOGGER.debug("Calling LWA to refresh the access token.")
+_LOGGER.debug("Calling LWA to refresh the access token")
 return await self._async_request_new_token(lwa_params)

 @callback

@@ -113,14 +113,14 @@ class Auth:
 )

 except (asyncio.TimeoutError, aiohttp.ClientError):
-    _LOGGER.error("Timeout calling LWA to get auth token.")
+    _LOGGER.error("Timeout calling LWA to get auth token")
     return None

 _LOGGER.debug("LWA response header: %s", response.headers)
 _LOGGER.debug("LWA response status: %s", response.status)

 if response.status != HTTP_OK:
-    _LOGGER.error("Error calling LWA to get auth token.")
+    _LOGGER.error("Error calling LWA to get auth token")
     return None

 response_json = await response.json()

@@ -101,7 +101,7 @@ async def async_send_changereport_message(
 )

 except (asyncio.TimeoutError, aiohttp.ClientError):
-    _LOGGER.error("Timeout sending report to Alexa.")
+    _LOGGER.error("Timeout sending report to Alexa")
     return

 response_text = await response.text()

@@ -233,7 +233,7 @@ async def async_send_doorbell_event_message(hass, config, alexa_entity):
 )

 except (asyncio.TimeoutError, aiohttp.ClientError):
-    _LOGGER.error("Timeout sending report to Alexa.")
+    _LOGGER.error("Timeout sending report to Alexa")
     return

 response_text = await response.text()
@@ -48,7 +48,7 @@ def setup(hass, config):
 try:
     apcups_data.update(no_throttle=True)
 except Exception:  # pylint: disable=broad-except
-    _LOGGER.exception("Failure while testing APCUPSd status retrieval.")
+    _LOGGER.exception("Failure while testing APCUPSd status retrieval")
     return False
 return True

@@ -97,7 +97,7 @@ def setup_scanner(hass, config, see, discovery_info=None):
 hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, aprs_disconnect)

 if not aprs_listener.start_event.wait(timeout):
-    _LOGGER.error("Timeout waiting for APRS to connect.")
+    _LOGGER.error("Timeout waiting for APRS to connect")
     return

 if not aprs_listener.start_success:

@@ -141,7 +141,7 @@ class AprsListenerThread(threading.Thread):

 try:
     _LOGGER.info(
-        "Opening connection to %s with callsign %s.", self.host, self.callsign
+        "Opening connection to %s with callsign %s", self.host, self.callsign
     )
     self.ais.connect()
     self.start_complete(

@@ -152,7 +152,7 @@ class AprsListenerThread(threading.Thread):
 self.start_complete(False, str(err))
 except OSError:
     _LOGGER.info(
-        "Closing connection to %s with callsign %s.", self.host, self.callsign
+        "Closing connection to %s with callsign %s", self.host, self.callsign
     )

 def stop(self):

@@ -59,7 +59,7 @@ def setup(hass, config):
 if arlo_base_station is not None:
     arlo_base_station.refresh_rate = scan_interval.total_seconds()
 elif not arlo.cameras:
-    _LOGGER.error("No Arlo camera or base station available.")
+    _LOGGER.error("No Arlo camera or base station available")
     return False

 hass.data[DATA_ARLO] = arlo

@@ -103,7 +103,7 @@ async def async_setup(hass, config, retry_delay=FIRST_RETRY_TIME):
 return True

 if not api.is_connected:
-    _LOGGER.error("Error connecting %s to %s.", DOMAIN, conf[CONF_HOST])
+    _LOGGER.error("Error connecting %s to %s", DOMAIN, conf[CONF_HOST])
     return False

 hass.data[DATA_ASUSWRT] = api

@@ -143,7 +143,7 @@ class AtomeData:
 values = self.atome_client.get_consumption(DAILY_TYPE)
 self._day_usage = values["total"] / 1000
 self._day_price = values["price"]
-_LOGGER.debug("Updating Atome daily data. Got: %d.", self._day_usage)
+_LOGGER.debug("Updating Atome daily data. Got: %d", self._day_usage)

 except KeyError as error:
     _LOGGER.error("Missing last value in values: %s: %s", values, error)

@@ -165,7 +165,7 @@ class AtomeData:
 values = self.atome_client.get_consumption(WEEKLY_TYPE)
 self._week_usage = values["total"] / 1000
 self._week_price = values["price"]
-_LOGGER.debug("Updating Atome weekly data. Got: %d.", self._week_usage)
+_LOGGER.debug("Updating Atome weekly data. Got: %d", self._week_usage)

 except KeyError as error:
     _LOGGER.error("Missing last value in values: %s: %s", values, error)

@@ -187,7 +187,7 @@ class AtomeData:
 values = self.atome_client.get_consumption(MONTHLY_TYPE)
 self._month_usage = values["total"] / 1000
 self._month_price = values["price"]
-_LOGGER.debug("Updating Atome monthly data. Got: %d.", self._month_usage)
+_LOGGER.debug("Updating Atome monthly data. Got: %d", self._month_usage)

 except KeyError as error:
     _LOGGER.error("Missing last value in values: %s: %s", values, error)

@@ -209,7 +209,7 @@ class AtomeData:
 values = self.atome_client.get_consumption(YEARLY_TYPE)
 self._year_usage = values["total"] / 1000
 self._year_price = values["price"]
-_LOGGER.debug("Updating Atome yearly data. Got: %d.", self._year_usage)
+_LOGGER.debug("Updating Atome yearly data. Got: %d", self._year_usage)

 except KeyError as error:
     _LOGGER.error("Missing last value in values: %s: %s", values, error)
@@ -60,7 +60,7 @@ async def async_request_validation(hass, config_entry, august_gateway):
 # In the future this should start a new config flow
 # instead of using the legacy configurator
 #
-_LOGGER.error("Access token is no longer valid.")
+_LOGGER.error("Access token is no longer valid")
 configurator = hass.components.configurator
 entry_id = config_entry.entry_id

@@ -351,7 +351,7 @@ class AugustData(AugustSubscriberMixin):
 doorbell_detail = self._device_detail_by_id.get(device_id)
 if doorbell_detail is None:
     _LOGGER.info(
-        "The doorbell %s could not be setup because the system could not fetch details about the doorbell.",
+        "The doorbell %s could not be setup because the system could not fetch details about the doorbell",
         doorbell.device_name,
     )
 else:

@@ -373,17 +373,17 @@ class AugustData(AugustSubscriberMixin):
 lock_detail = self._device_detail_by_id.get(device_id)
 if lock_detail is None:
     _LOGGER.info(
-        "The lock %s could not be setup because the system could not fetch details about the lock.",
+        "The lock %s could not be setup because the system could not fetch details about the lock",
         lock.device_name,
     )
 elif lock_detail.bridge is None:
     _LOGGER.info(
-        "The lock %s could not be setup because it does not have a bridge (Connect).",
+        "The lock %s could not be setup because it does not have a bridge (Connect)",
         lock.device_name,
     )
 elif not lock_detail.bridge.operative:
     _LOGGER.info(
-        "The lock %s could not be setup because the bridge (Connect) is not operative.",
+        "The lock %s could not be setup because the bridge (Connect) is not operative",
         lock.device_name,
     )
 else:

@@ -88,7 +88,7 @@ async def async_setup_entry(hass, config_entry, async_add_entities):
 detail = data.get_device_detail(door.device_id)
 if not detail.doorsense:
     _LOGGER.debug(
-        "Not adding sensor class door for lock %s because it does not have doorsense.",
+        "Not adding sensor class door for lock %s because it does not have doorsense",
         door.device_name,
     )
     continue

@@ -338,7 +338,7 @@ class AutomationEntity(ToggleEntity, RestoreEntity):
 else:
     enable_automation = DEFAULT_INITIAL_STATE
     _LOGGER.debug(
-        "Automation %s not in state storage, state %s from default is used.",
+        "Automation %s not in state storage, state %s from default is used",
         self.entity_id,
         enable_automation,
     )

@@ -145,7 +145,7 @@ class BMWConnectedDriveAccount:
 except OSError as exception:
     _LOGGER.error(
         "Could not connect to the BMW Connected Drive portal. "
-        "The vehicle state could not be updated."
+        "The vehicle state could not be updated"
     )
     _LOGGER.exception(exception)

@@ -44,7 +44,7 @@ def setup_platform(hass, config, add_entities, discovery_info=None):
 try:
     things = bapi.getThings()["things"]
     if not things:
-        _LOGGER.error("No things present in account.")
+        _LOGGER.error("No things present in account")
     else:
         add_entities(
             [

@@ -76,7 +76,7 @@ class BrData:

 async def schedule_update(self, minute=1):
     """Schedule an update after minute minutes."""
-    _LOGGER.debug("Scheduling next update in %s minutes.", minute)
+    _LOGGER.debug("Scheduling next update in %s minutes", minute)
     nxt = dt_util.utcnow() + timedelta(minutes=minute)
     async_track_point_in_utc_time(self.hass, self.async_update, nxt)
@@ -115,8 +115,7 @@ class BrData:
 self.load_error_count += 1
 threshold_log(
     self.load_error_count,
-    "Unable to retrieve json data from Buienradar."
-    "(Msg: %s, status: %s,)",
+    "Unable to retrieve json data from Buienradar" "(Msg: %s, status: %s,)",
     content.get(MESSAGE),
     content.get(STATUS_CODE),
 )

@@ -136,7 +135,7 @@ class BrData:
 # unable to get the data
 threshold_log(
     self.rain_error_count,
-    "Unable to retrieve rain data from Buienradar." "(Msg: %s, status: %s)",
+    "Unable to retrieve rain data from Buienradar" "(Msg: %s, status: %s)",
     raincontent.get(MESSAGE),
     raincontent.get(STATUS_CODE),
 )

@@ -493,7 +493,7 @@ class CameraView(HomeAssistantView):
 raise web.HTTPUnauthorized()

 if not camera.is_on:
-    _LOGGER.debug("Camera is off.")
+    _LOGGER.debug("Camera is off")
     raise web.HTTPServiceUnavailable()

 return await self.handle(request, camera)

@@ -549,7 +549,7 @@ async def websocket_camera_thumbnail(hass, connection, msg):

 Async friendly.
 """
-_LOGGER.warning("The websocket command 'camera_thumbnail' has been deprecated.")
+_LOGGER.warning("The websocket command 'camera_thumbnail' has been deprecated")
 try:
     image = await async_get_image(hass, msg["entity_id"])
     await connection.send_big_result(

@@ -104,7 +104,7 @@ def setup_internal_discovery(hass: HomeAssistant) -> None:
     ),
 )

-_LOGGER.debug("Starting internal pychromecast discovery.")
+_LOGGER.debug("Starting internal pychromecast discovery")
 listener = pychromecast.CastListener(
     internal_add_update_callback,
     internal_remove_callback,

@@ -114,7 +114,7 @@ def setup_internal_discovery(hass: HomeAssistant) -> None:

 def stop_discovery(event):
     """Stop discovery of new chromecasts."""
-    _LOGGER.debug("Stopping internal pychromecast discovery.")
+    _LOGGER.debug("Stopping internal pychromecast discovery")
     pychromecast.discovery.stop_discovery(browser)
     hass.data[INTERNAL_DISCOVERY_RUNNING_KEY].release()

@@ -133,7 +133,7 @@ async def async_setup_platform(
 _LOGGER.warning(
     "Setting configuration for Cast via platform is deprecated. "
     "Configure via Cast integration instead."
-    "This option will become invalid in version 0.116."
+    "This option will become invalid in version 0.116"
 )
 await _async_setup_platform(hass, config, async_add_entities, discovery_info)

@@ -306,7 +306,7 @@ class CastDevice(MediaPlayerEntity):
 # Can't disconnect if not connected.
 return
 _LOGGER.debug(
-    "[%s %s] Disconnecting from chromecast socket.",
+    "[%s %s] Disconnecting from chromecast socket",
     self.entity_id,
     self._cast_info.friendly_name,
 )

@@ -479,7 +479,7 @@ class CastDevice(MediaPlayerEntity):
 self._chromecast.start_app(app_id)
 if app_data:
     _LOGGER.warning(
-        "Extra keys %s were ignored. Please use app_name to cast media.",
+        "Extra keys %s were ignored. Please use app_name to cast media",
         app_data.keys(),
     )
 return

@@ -64,7 +64,7 @@ def setup_platform(hass, config, add_entities, discovery_info=None):
 _LOGGER.warning(
     "Currency ID %s or display currency %s "
     "is not available. Using 1 (bitcoin) "
-    "and USD.",
+    "and USD",
     currency_id,
     display_currency,
 )
@@ -83,9 +83,9 @@ class ZWaveConfigWriteView(HomeAssistantView):
 network = hass.data.get(const.DATA_NETWORK)
 if network is None:
     return self.json_message("No Z-Wave network data found", HTTP_NOT_FOUND)
-_LOGGER.info("Z-Wave configuration written to file.")
+_LOGGER.info("Z-Wave configuration written to file")
 network.write_config()
-return self.json_message("Z-Wave configuration saved to file.", HTTP_OK)
+return self.json_message("Z-Wave configuration saved to file", HTTP_OK)


 class ZWaveNodeValueView(HomeAssistantView):

@@ -66,7 +66,7 @@ def setup_platform(hass, config, add_entities, discovery_info=None):

 add_entities(DecoraWifiLight(sw) for sw in all_switches)
 except ValueError:
-    _LOGGER.error("Failed to communicate with myLeviton Service.")
+    _LOGGER.error("Failed to communicate with myLeviton Service")

 # Listen for the stop event and log out.
 def logout(event):

@@ -75,7 +75,7 @@ def setup_platform(hass, config, add_entities, discovery_info=None):
 if session is not None:
     Person.logout(session)
 except ValueError:
-    _LOGGER.error("Failed to log out of myLeviton Service.")
+    _LOGGER.error("Failed to log out of myLeviton Service")

 hass.bus.listen(EVENT_HOMEASSISTANT_STOP, logout)

@@ -127,7 +127,7 @@ class DecoraWifiLight(LightEntity):
 try:
     self._switch.update_attributes(attribs)
 except ValueError:
-    _LOGGER.error("Failed to turn on myLeviton switch.")
+    _LOGGER.error("Failed to turn on myLeviton switch")

 def turn_off(self, **kwargs):
     """Instruct the switch to turn off."""

@@ -135,11 +135,11 @@ class DecoraWifiLight(LightEntity):
 try:
     self._switch.update_attributes(attribs)
 except ValueError:
-    _LOGGER.error("Failed to turn off myLeviton switch.")
+    _LOGGER.error("Failed to turn off myLeviton switch")

 def update(self):
     """Fetch new state data for this switch."""
     try:
         self._switch.refresh()
     except ValueError:
-        _LOGGER.error("Failed to update myLeviton switch data.")
+        _LOGGER.error("Failed to update myLeviton switch data")

@@ -94,7 +94,7 @@ async def async_setup(hass: HomeAssistant, config: dict):
 doorstation = get_doorstation_by_token(hass, token)

 if doorstation is None:
-    _LOGGER.error("Device not found for provided token.")
+    _LOGGER.error("Device not found for provided token")
     return

 # Clear webhooks

@@ -107,7 +107,7 @@ async def async_setup(hass, config):
 partner = conf.get(CONF_PARTNER)

 if hass.config.time_zone is None:
-    _LOGGER.error("Timezone is not set in Home Assistant.")
+    _LOGGER.error("Timezone is not set in Home Assistant")
     return False

 timezone = str(hass.config.time_zone)

@@ -168,6 +168,6 @@ USN: {unique_service_name}

 def clean_socket_close(sock):
     """Close a socket connection and logs its closure."""
-    _LOGGER.info("UPNP responder shutting down.")
+    _LOGGER.info("UPNP responder shutting down")

     sock.close()

@@ -53,7 +53,7 @@ def setup_platform(hass, config, add_entities, discovery_info=None):
 try:
     envirophat = importlib.import_module("envirophat")
 except OSError:
-    _LOGGER.error("No Enviro pHAT was found.")
+    _LOGGER.error("No Enviro pHAT was found")
     return False

 data = EnvirophatData(envirophat, config.get(CONF_USE_LEDS))
@@ -194,7 +194,7 @@ async def async_setup(hass, config):
 controller.callback_login_timeout = connection_fail_callback
 controller.callback_login_success = connection_success_callback

-_LOGGER.info("Start envisalink.")
+_LOGGER.info("Start envisalink")
 controller.start()

 result = await sync_connect

@@ -160,9 +160,9 @@ class EverLightsLight(LightEntity):
 self._status = await self._api.get_status()
 except pyeverlights.ConnectionError:
     if self._available:
-        _LOGGER.warning("EverLights control box connection lost.")
+        _LOGGER.warning("EverLights control box connection lost")
     self._available = False
 else:
     if not self._available:
-        _LOGGER.warning("EverLights control box connection restored.")
+        _LOGGER.warning("EverLights control box connection restored")
     self._available = True

@@ -161,14 +161,14 @@ def _handle_exception(err) -> bool:
 if err.status == HTTP_SERVICE_UNAVAILABLE:
     _LOGGER.warning(
         "The vendor says their server is currently unavailable. "
-        "Check the vendor's service status page."
+        "Check the vendor's service status page"
     )
     return False

 if err.status == HTTP_TOO_MANY_REQUESTS:
     _LOGGER.warning(
         "The vendor's API rate limit has been exceeded. "
-        "If this message persists, consider increasing the %s.",
+        "If this message persists, consider increasing the %s",
         CONF_SCAN_INTERVAL,
     )
     return False

@@ -221,7 +221,7 @@ async def async_setup(hass: HomeAssistantType, config: ConfigType) -> bool:
 except IndexError:
     _LOGGER.error(
         "Config error: '%s' = %s, but the valid range is 0-%s. "
-        "Unable to continue. Fix any configuration errors and restart HA.",
+        "Unable to continue. Fix any configuration errors and restart HA",
         CONF_LOCATION_IDX,
         loc_idx,
         len(client_v2.installation_info) - 1,

@@ -134,7 +134,7 @@ class FibaroController:
 info = self._client.info.get()
 self.hub_serial = slugify(info.serialNumber)
 except AssertionError:
-    _LOGGER.error("Can't connect to Fibaro HC. Please check URL.")
+    _LOGGER.error("Can't connect to Fibaro HC. Please check URL")
     return False
 if login is None or login.status is False:
     _LOGGER.error(

@@ -87,10 +87,10 @@ async def async_setup_platform(hass, config, async_add_entities, discovery_info=
 FoobotClient.TooManyRequests,
 FoobotClient.InternalError,
 ):
-    _LOGGER.exception("Failed to connect to foobot servers.")
+    _LOGGER.exception("Failed to connect to foobot servers")
     raise PlatformNotReady
 except FoobotClient.ClientError:
-    _LOGGER.error("Failed to fetch data from foobot servers.")
+    _LOGGER.error("Failed to fetch data from foobot servers")
     return
 async_add_entities(dev, True)

@@ -59,7 +59,7 @@ class FreeboxWifiSwitch(SwitchEntity):
 await self._router.wifi.set_global_config(wifi_config)
 except InsufficientPermissionsError:
     _LOGGER.warning(
-        "Home Assistant does not have permissions to modify the Freebox settings. Please refer to documentation."
+        "Home Assistant does not have permissions to modify the Freebox settings. Please refer to documentation"
     )

 async def async_turn_on(self, **kwargs):

@@ -349,7 +349,7 @@ def _async_setup_themes(hass, themes):
 hass.data[DATA_DEFAULT_THEME] = name
 update_theme_and_fire_event()
 else:
-    _LOGGER.warning("Theme %s is not defined.", name)
+    _LOGGER.warning("Theme %s is not defined", name)

 async def reload_themes(_):
     """Reload themes."""
@@ -34,7 +34,7 @@ async def async_setup_entry(
 ) as err:
     _LOGGER.error("Error occurred during Garmin Connect Client update: %s", err)
 except Exception:  # pylint: disable=broad-except
-    _LOGGER.exception("Unknown error occurred during Garmin Connect Client update.")
+    _LOGGER.exception("Unknown error occurred during Garmin Connect Client update")

 entities = []
 for (

@@ -222,7 +222,7 @@ def check_correct_scopes(token_file):
 """Check for the correct scopes in file."""
 tokenfile = open(token_file).read()
 if "readonly" in tokenfile:
-    _LOGGER.warning("Please re-authenticate with Google.")
+    _LOGGER.warning("Please re-authenticate with Google")
     return False
 return True

@@ -106,7 +106,7 @@ async def async_setup(hass: HomeAssistant, yaml_config: Dict[str, Any]):

 if agent_user_id is None:
     _LOGGER.warning(
-        "No agent_user_id supplied for request_sync. Call as a user or pass in user id as agent_user_id."
+        "No agent_user_id supplied for request_sync. Call as a user or pass in user id as agent_user_id"
     )
     return

@@ -66,7 +66,7 @@ class GoogleMapsScanner:

 except InvalidCookies:
     _LOGGER.error(
-        "The cookie file provided does not provide a valid session. Please create another one and try again."
+        "The cookie file provided does not provide a valid session. Please create another one and try again"
     )
     self.success_init = False

@@ -258,7 +258,7 @@ class HarmonyRemote(remote.RemoteEntity):
 _LOGGER.debug("%s: Connecting", self._name)
 try:
     if not await self._client.connect():
-        _LOGGER.warning("%s: Unable to connect to HUB.", self._name)
+        _LOGGER.warning("%s: Unable to connect to HUB", self._name)
         await self._client.close()
         return False
 except aioexc.TimeOut:

@@ -283,14 +283,14 @@ class HarmonyRemote(remote.RemoteEntity):

 async def got_connected(self, _=None):
     """Notification that we're connected to the HUB."""
-    _LOGGER.debug("%s: connected to the HUB.", self._name)
+    _LOGGER.debug("%s: connected to the HUB", self._name)
     if not self._available:
         # We were disconnected before.
         await self.new_config()

 async def got_disconnected(self, _=None):
     """Notification that we're disconnected from the HUB."""
-    _LOGGER.debug("%s: disconnected from the HUB.", self._name)
+    _LOGGER.debug("%s: disconnected from the HUB", self._name)
     self._available = False
     # We're going to wait for 10 seconds before announcing we're
     # unavailable, this to allow a reconnection to happen.

@@ -196,7 +196,7 @@ async def async_setup(hass, config):
 for env in ("HASSIO", "HASSIO_TOKEN"):
     if os.environ.get(env):
         continue
-    _LOGGER.error("Missing %s environment variable.", env)
+    _LOGGER.error("Missing %s environment variable", env)
     return False

 host = os.environ["HASSIO"]

@@ -78,7 +78,7 @@ class HassIOBaseAuth(HomeAssistantView):
 if prv is not None:
     return prv

-_LOGGER.error("Can't find Home Assistant auth.")
+_LOGGER.error("Can't find Home Assistant auth")
 raise HTTPNotFound()

@@ -176,7 +176,7 @@ class HassIO:
 )

 if request.status not in (HTTP_OK, HTTP_BAD_REQUEST):
-    _LOGGER.error("%s return code %d.", command, request.status)
+    _LOGGER.error("%s return code %d", command, request.status)
     raise HassioAPIError()

 answer = await request.json()
@@ -156,7 +156,7 @@ async def async_setup_platform(
 _are_valid_client_credentials, here_client
 ):
     _LOGGER.error(
-        "Invalid credentials. This error is returned if the specified token was invalid or no contract could be found for this token."
+        "Invalid credentials. This error is returned if the specified token was invalid or no contract could be found for this token"
     )
     return

@@ -69,7 +69,7 @@ class ConfigEntryAuth(homeconnect.HomeConnectAPI):
 elif app.type == "Hob":
     device = Hob(self.hass, app)
 else:
-    _LOGGER.warning("Appliance type %s not implemented.", app.type)
+    _LOGGER.warning("Appliance type %s not implemented", app.type)
     continue
 devices.append({"device": device, "entities": device.get_entity_info()})
 self.devices = devices

@@ -93,15 +93,15 @@ class HomeConnectDevice:
 try:
     self.appliance.get_status()
 except (HomeConnectError, ValueError):
-    _LOGGER.debug("Unable to fetch appliance status. Probably offline.")
+    _LOGGER.debug("Unable to fetch appliance status. Probably offline")
 try:
     self.appliance.get_settings()
 except (HomeConnectError, ValueError):
-    _LOGGER.debug("Unable to fetch settings. Probably offline.")
+    _LOGGER.debug("Unable to fetch settings. Probably offline")
 try:
     program_active = self.appliance.get_programs_active()
 except (HomeConnectError, ValueError):
-    _LOGGER.debug("Unable to fetch active programs. Probably offline.")
+    _LOGGER.debug("Unable to fetch active programs. Probably offline")
     program_active = None
 if program_active and "key" in program_active:
     self.appliance.status[BSH_ACTIVE_PROGRAM] = {"value": program_active["key"]}

@@ -191,7 +191,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry):
 # If the previous instance hasn't cleaned up yet
 # we need to wait a bit
 if not await hass.async_add_executor_job(port_is_available, port):
-    _LOGGER.warning("The local port %s is in use.", port)
+    _LOGGER.warning("The local port %s is in use", port)
     raise ConfigEntryNotReady

 if CONF_ENTRY_INDEX in conf and conf[CONF_ENTRY_INDEX] == 0:

@@ -266,7 +266,7 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry):
 if not await hass.async_add_executor_job(
     port_is_available, entry.data[CONF_PORT]
 ):
-    _LOGGER.info("Waiting for the HomeKit server to shutdown.")
+    _LOGGER.info("Waiting for the HomeKit server to shutdown")
     await asyncio.sleep(1)

 hass.data[DOMAIN].pop(entry.entry_id)

@@ -310,7 +310,7 @@ def _async_register_events_and_services(hass: HomeAssistant):
 if homekit.status != STATUS_RUNNING:
     _LOGGER.warning(
         "HomeKit is not running. Either it is waiting to be "
-        "started or has been stopped."
+        "started or has been stopped"
     )
     continue

@@ -336,7 +336,7 @@ def _async_register_events_and_services(hass: HomeAssistant):
 if homekit.status != STATUS_READY:
     _LOGGER.warning(
         "HomeKit is not ready. Either it is already starting up or has "
-        "been stopped."
+        "been stopped"
     )
     continue
 await homekit.async_start()

@@ -436,7 +436,7 @@ class HomeKit:
 # The bridge itself counts as an accessory
 if len(self.bridge.accessories) + 1 >= MAX_DEVICES:
     _LOGGER.warning(
-        "Cannot add %s as this would exceeded the %d device limit. Consider using the filter option.",
+        "Cannot add %s as this would exceeded the %d device limit. Consider using the filter option",
         state.entity_id,
         MAX_DEVICES,
     )

@@ -129,7 +129,7 @@ def get_accessory(hass, driver, state, aid, config):
 if not aid:
     _LOGGER.warning(
         'The entity "%s" is not supported, since it '
-        "generates an invalid aid, please change it.",
+        "generates an invalid aid, please change it",
         state.entity_id,
     )
     return None

@@ -61,6 +61,6 @@ class TurboJPEGSingleton:
 TurboJPEGSingleton.__instance = TurboJPEG()
 except Exception:  # pylint: disable=broad-except
     _LOGGER.exception(
-        "libturbojpeg is not installed, cameras may impact HomeKit performance."
+        "libturbojpeg is not installed, cameras may impact HomeKit performance"
     )
     TurboJPEGSingleton.__instance = False
@@ -357,17 +357,17 @@ class Camera(HomeAccessory, PyhapCamera):
 self._async_stop_ffmpeg_watch()

 if not pid_is_alive(stream.process.pid):
-    _LOGGER.info("[%s] Stream already stopped.", session_id)
+    _LOGGER.info("[%s] Stream already stopped", session_id)
     return True

 for shutdown_method in ["close", "kill"]:
-    _LOGGER.info("[%s] %s stream.", session_id, shutdown_method)
+    _LOGGER.info("[%s] %s stream", session_id, shutdown_method)
     try:
         await getattr(stream, shutdown_method)()
         return
     except Exception:  # pylint: disable=broad-except
         _LOGGER.exception(
-            "[%s] Failed to %s stream.", session_id, shutdown_method
+            "[%s] Failed to %s stream", session_id, shutdown_method
         )

 async def reconfigure_stream(self, session_info, stream_config):

@@ -336,7 +336,7 @@ class TelevisionMediaPlayer(HomeAccessory):
 input_type = 3 if "hdmi" in source.lower() else 0
 serv_input.configure_char(CHAR_INPUT_SOURCE_TYPE, value=input_type)
 serv_input.configure_char(CHAR_CURRENT_VISIBILITY_STATE, value=False)
-_LOGGER.debug("%s: Added source %s.", self.entity_id, source)
+_LOGGER.debug("%s: Added source %s", self.entity_id, source)

 self.async_update_state(state)

@@ -334,7 +334,7 @@ class Thermostat(HomeAccessory):
 if not hc_modes:
     # This cannot be none OR an empty list
     _LOGGER.error(
-        "%s: HVAC modes not yet available. Please disable auto start for homekit.",
+        "%s: HVAC modes not yet available. Please disable auto start for homekit",
         self.entity_id,
     )
     hc_modes = (

@@ -306,7 +306,7 @@ class HomeKitSpeedMapping:
 _LOGGER.warning(
     "%s does not contain the speed setting "
     "%s as its first element. "
-    "Assuming that %s is equivalent to 'off'.",
+    "Assuming that %s is equivalent to 'off'",
     speed_list,
     fan.SPEED_OFF,
     speed_list[0],

@@ -275,7 +275,7 @@ class HKDevice:
 async def async_update(self, now=None):
     """Poll state of all entities attached to this bridge/accessory."""
     if not self.pollable_characteristics:
-        _LOGGER.debug("HomeKit connection not polling any characteristics.")
+        _LOGGER.debug("HomeKit connection not polling any characteristics")
         return

     if self._polling_lock.locked():

@@ -105,7 +105,7 @@ class HomematicipAlarmControlPanelEntity(AlarmControlPanelEntity):
 self.async_write_ha_state()
 else:
     _LOGGER.debug(
-        "Device Changed Event for %s (Alarm Control Panel) not fired. Entity is disabled.",
+        "Device Changed Event for %s (Alarm Control Panel) not fired. Entity is disabled",
         self.name,
     )

@@ -111,7 +111,7 @@ class HomematicipGenericDevice(Entity):
 self.async_write_ha_state()
 else:
     _LOGGER.debug(
-        "Device Changed Event for %s (%s) not fired. Entity is disabled.",
+        "Device Changed Event for %s (%s) not fired. Entity is disabled",
         self.name,
         self._device.modelType,
     )

@@ -45,7 +45,7 @@ class RequestDataValidator:
 data = await request.json()
 except ValueError:
     if not self._allow_empty or (await request.content.read()) != b"":
-        _LOGGER.error("Invalid JSON received.")
+        _LOGGER.error("Invalid JSON received")
         return view.json_message("Invalid JSON.", HTTP_BAD_REQUEST)
     data = {}

@@ -153,7 +153,7 @@ class HueBridge:
 client_exceptions.ServerDisconnectedError,
 ) as err:
     if tries == 3:
-        _LOGGER.error("Request failed %s times, giving up.", tries)
+        _LOGGER.error("Request failed %s times, giving up", tries)
         raise

     # We only retry if it's a server error. So raise on all 4XX errors.
@@ -76,7 +76,7 @@ async def async_setup(hass, config):
 for target, key in target_keys.items():
     res = pyfttt.send_event(key, event, value1, value2, value3)
     if res.status_code != HTTP_OK:
-        _LOGGER.error("IFTTT reported error sending event to %s.", target)
+        _LOGGER.error("IFTTT reported error sending event to %s", target)
 except requests.exceptions.RequestException:
     _LOGGER.exception("Error communicating with IFTTT")

@@ -155,7 +155,7 @@ class IslamicPrayerClient:
 self.available = True
 except (exceptions.InvalidResponseError, ConnError):
     self.available = False
-    _LOGGER.debug("Error retrieving prayer times.")
+    _LOGGER.debug("Error retrieving prayer times")
     async_call_later(self.hass, 60, self.async_update)
     return

@@ -165,7 +165,7 @@ class IslamicPrayerClient:
 )
 await self.async_schedule_future_update()

-_LOGGER.debug("New prayer times retrieved. Updating sensors.")
+_LOGGER.debug("New prayer times retrieved. Updating sensors")
 async_dispatcher_send(self.hass, DATA_UPDATED)

 async def async_setup(self):

@@ -181,7 +181,7 @@ async def async_setup_entry(

 def _start_auto_update() -> None:
     """Start isy auto update."""
-    _LOGGER.debug("ISY Starting Event Stream and automatic updates.")
+    _LOGGER.debug("ISY Starting Event Stream and automatic updates")
     isy.auto_update = True

 await hass.async_add_executor_job(_start_auto_update)

@@ -257,7 +257,7 @@ async def async_unload_entry(

 def _stop_auto_update() -> None:
     """Start isy auto update."""
-    _LOGGER.debug("ISY Stopping Event Stream and automatic updates.")
+    _LOGGER.debug("ISY Stopping Event Stream and automatic updates")
     isy.auto_update = False

 await hass.async_add_executor_job(_stop_auto_update)

@@ -107,7 +107,7 @@ async def async_setup_entry(
 if not parent_device:
     _LOGGER.error(
         "Node %s has a parent node %s, but no device "
-        "was created for the parent. Skipping.",
+        "was created for the parent. Skipping",
         node.address,
         node.parent_node,
     )

@@ -157,7 +157,7 @@ class ISYNodeEntity(ISYEntity):
 """Respond to an entity service command call."""
 if not hasattr(self._node, command):
     _LOGGER.error(
-        "Invalid Service Call %s for device %s.", command, self.entity_id
+        "Invalid Service Call %s for device %s", command, self.entity_id
     )
     return
 getattr(self._node, command)()

@@ -168,7 +168,7 @@ class ISYNodeEntity(ISYEntity):
 """Respond to an entity service raw command call."""
 if not hasattr(self._node, "send_cmd"):
     _LOGGER.error(
-        "Invalid Service Call %s for device %s.", command, self.entity_id
+        "Invalid Service Call %s for device %s", command, self.entity_id
     )
     return
 self._node.send_cmd(command, value, unit_of_measurement, parameters)

@@ -330,7 +330,7 @@ def _categorize_programs(hass_isy_data: dict, programs: Programs) -> None:
 status = entity_folder.get_by_name(KEY_STATUS)
 if not status or not status.protocol == PROTO_PROGRAM:
     _LOGGER.warning(
-        "Program %s entity '%s' not loaded, invalid/missing status program.",
+        "Program %s entity '%s' not loaded, invalid/missing status program",
         platform,
         entity_folder.name,
     )

@@ -340,7 +340,7 @@ def _categorize_programs(hass_isy_data: dict, programs: Programs) -> None:
 actions = entity_folder.get_by_name(KEY_ACTIONS)
 if not actions or not actions.protocol == PROTO_PROGRAM:
     _LOGGER.warning(
-        "Program %s entity '%s' not loaded, invalid/missing actions program.",
+        "Program %s entity '%s' not loaded, invalid/missing actions program",
         platform,
         entity_folder.name,
     )
@@ -211,7 +211,7 @@ def async_setup_services(hass: HomeAssistantType):
 await hass.async_add_executor_job(command.run)
 return
 _LOGGER.error(
-    "Could not run network resource command. Not found or enabled on the ISY."
+    "Could not run network resource command. Not found or enabled on the ISY"
 )

 async def async_send_program_command_service_handler(service):

@@ -233,9 +233,7 @@ def async_setup_services(hass: HomeAssistantType):
 if program is not None:
     await hass.async_add_executor_job(getattr(program, command))
     return
-_LOGGER.error(
-    "Could not send program command. Not found or enabled on the ISY."
-)
+_LOGGER.error("Could not send program command. Not found or enabled on the ISY")

 async def async_set_variable_service_handler(service):
     """Handle a set variable service call."""

@@ -258,7 +256,7 @@ def async_setup_services(hass: HomeAssistantType):
 if variable is not None:
     await hass.async_add_executor_job(variable.set_value, value, init)
     return
-_LOGGER.error("Could not set variable value. Not found or enabled on the ISY.")
+_LOGGER.error("Could not set variable value. Not found or enabled on the ISY")

 async def async_cleanup_registry_entries(service) -> None:
     """Remove extra entities that are no longer part of the integration."""

@@ -369,7 +367,7 @@ def async_unload_services(hass: HomeAssistantType):
 ):
     return

-_LOGGER.info("Unloading ISY994 Services.")
+_LOGGER.info("Unloading ISY994 Services")
 hass.services.async_remove(domain=DOMAIN, service=SERVICE_SYSTEM_QUERY)
 hass.services.async_remove(domain=DOMAIN, service=SERVICE_RUN_NETWORK_RESOURCE)
 hass.services.async_remove(domain=DOMAIN, service=SERVICE_SEND_PROGRAM_COMMAND)

@@ -45,12 +45,12 @@ class ISYSwitchEntity(ISYNodeEntity, SwitchEntity):
 def turn_off(self, **kwargs) -> None:
     """Send the turn off command to the ISY994 switch."""
     if not self._node.turn_off():
-        _LOGGER.debug("Unable to turn off switch.")
+        _LOGGER.debug("Unable to turn off switch")

 def turn_on(self, **kwargs) -> None:
     """Send the turn on command to the ISY994 switch."""
     if not self._node.turn_on():
-        _LOGGER.debug("Unable to turn on switch.")
+        _LOGGER.debug("Unable to turn on switch")

 @property
 def icon(self) -> str:

@@ -42,7 +42,7 @@ def setup_platform(hass, config, add_entities, discovery_info=None):
 available_locks = kiwi.get_locks()
 if not available_locks:
     # No locks found; abort setup routine.
-    _LOGGER.info("No KIWI locks found in your account.")
+    _LOGGER.info("No KIWI locks found in your account")
     return
 add_entities([KiwiLock(lock, kiwi) for lock in available_locks], True)

@@ -346,7 +346,7 @@ class KonnectedView(HomeAssistantView):
 _LOGGER.error(
     "Your Konnected device software may be out of "
     "date. Visit https://help.konnected.io for "
-    "updating instructions."
+    "updating instructions"
 )

 device = data[CONF_DEVICES].get(device_id)

@@ -248,7 +248,7 @@ async def async_setup(hass, config):
 connections.append(connection)
 _LOGGER.info('LCN connected to "%s"', connection_name)
 except TimeoutError:
-    _LOGGER.error('Connection to PCHK server "%s" failed.', connection_name)
+    _LOGGER.error('Connection to PCHK server "%s" failed', connection_name)
     return False

 hass.data[DATA_LCN][CONF_CONNECTIONS] = connections

@@ -163,7 +163,7 @@ def aiolifx_effects():

 async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
     """Set up the LIFX light platform. Obsolete."""
-    _LOGGER.warning("LIFX no longer works with light platform configuration.")
+    _LOGGER.warning("LIFX no longer works with light platform configuration")


 async def async_setup_entry(hass, config_entry, async_add_entities):
@@ -225,7 +225,7 @@ async def create_yaml_resource_col(hass, yaml_resources):
 else:
     if CONF_RESOURCES in ll_conf:
         _LOGGER.warning(
-            "Resources need to be specified in your configuration.yaml. Please see the docs."
+            "Resources need to be specified in your configuration.yaml. Please see the docs"
         )
         yaml_resources = ll_conf[CONF_RESOURCES]

@@ -289,7 +289,7 @@ class MatrixBot:
 if self._mx_id in self._auth_tokens:
     try:
         client = self._login_by_token()
-        _LOGGER.debug("Logged in using stored token.")
+        _LOGGER.debug("Logged in using stored token")

     except MatrixRequestError as ex:
         _LOGGER.warning(

@@ -302,7 +302,7 @@ class MatrixBot:
 if not client:
     try:
         client = self._login_by_password()
-        _LOGGER.debug("Logged in using password.")
+        _LOGGER.debug("Logged in using password")

     except MatrixRequestError as ex:
         _LOGGER.error(

@@ -906,7 +906,7 @@ async def websocket_handle_thumbnail(hass, connection, msg):
 return

 _LOGGER.warning(
-    "The websocket command media_player_thumbnail is deprecated. Use /api/media_player_proxy instead."
+    "The websocket command media_player_thumbnail is deprecated. Use /api/media_player_proxy instead"
 )

 data, content_type = await player.async_get_media_image()

@@ -79,7 +79,7 @@ PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
 async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
     """Set up the MiFlora sensor."""
     backend = BACKEND
-    _LOGGER.debug("Miflora is using %s backend.", backend.__name__)
+    _LOGGER.debug("Miflora is using %s backend", backend.__name__)

     cache = config.get(CONF_SCAN_INTERVAL, SCAN_INTERVAL).total_seconds()
     poller = miflora_poller.MiFloraPoller(

@@ -72,7 +72,7 @@ PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
 def setup_platform(hass, config, add_entities, discovery_info=None):
     """Set up the MiTempBt sensor."""
     backend = BACKEND
-    _LOGGER.debug("MiTempBt is using %s backend.", backend.__name__)
+    _LOGGER.debug("MiTempBt is using %s backend", backend.__name__)

     cache = config.get(CONF_CACHE)
     poller = mitemp_bt_poller.MiTempBtPoller(

@@ -38,7 +38,7 @@ def setup_platform(hass, config, add_entities, discovery_info=None):

 modem = bm(port)
 if modem.state == modem.STATE_FAILED:
-    _LOGGER.error("Unable to initialize modem.")
+    _LOGGER.error("Unable to initialize modem")
     return

 add_entities([ModemCalleridSensor(hass, name, port, modem)])

@@ -89,7 +89,7 @@ def valid_stations(stations, given_stations):
 if station is None:
     continue
 if not any(s.code == station.upper() for s in stations):
-    _LOGGER.warning("Station '%s' is not a valid station.", station)
+    _LOGGER.warning("Station '%s' is not a valid station", station)
     return False
 return True

@@ -347,7 +347,7 @@ class NestDevice:
 _LOGGER.warning(
     "Cannot retrieve device name for [%s]"
     ", please check your Nest developer "
-    "account permission settings.",
+    "account permission settings",
     device.serial,
 )
 continue

@@ -103,7 +103,7 @@ class NestCamera(Camera):
 def turn_on(self):
     """Turn on camera."""
     if not self._online:
-        _LOGGER.error("Camera %s is offline.", self._name)
+        _LOGGER.error("Camera %s is offline", self._name)
         return

     _LOGGER.debug("Turn on camera %s", self._name)
@@ -532,7 +532,7 @@ class NetatmoPublicData:
 return

 if data.CountStationInArea() == 0:
-    _LOGGER.warning("No Stations available in this area.")
+    _LOGGER.warning("No Stations available in this area")
     return

 self.data = data

@@ -165,7 +165,7 @@ def setup(hass, config):
 "WARNING: This may poll your Leaf too often, and drain the 12V"
 " battery. If you drain your cars 12V battery it WILL NOT START"
 " as the drive train battery won't connect."
-" Don't set the intervals too low."
+" Don't set the intervals too low"
 )

 data_store = LeafDataStore(hass, leaf, car_config)

@@ -122,7 +122,7 @@ class OpenAlprCloudEntity(ImageProcessingAlprEntity):
 data = await request.json()

 if request.status != HTTP_OK:
-    _LOGGER.error("Error %d -> %s.", request.status, data.get("error"))
+    _LOGGER.error("Error %d -> %s", request.status, data.get("error"))
     return

 except (asyncio.TimeoutError, aiohttp.ClientError):

@@ -52,7 +52,7 @@ def setup(hass, config):
 try:
     interfaces_client.get_arp()
 except APIException:
-    _LOGGER.exception("Failure while connecting to OPNsense API endpoint.")
+    _LOGGER.exception("Failure while connecting to OPNsense API endpoint")
     return False

 if tracker_interfaces:

@@ -377,7 +377,7 @@ async def async_handle_not_impl_msg(hass, context, message):

 async def async_handle_unsupported_msg(hass, context, message):
     """Handle an unsupported or invalid message type."""
-    _LOGGER.warning("Received unsupported message type: %s.", message.get("_type"))
+    _LOGGER.warning("Received unsupported message type: %s", message.get("_type"))


 async def async_handle_message(hass, context, message):

@@ -172,7 +172,7 @@ async def async_setup(hass: HomeAssistant, config: dict) -> bool:
 if entity_id not in persistent_notifications:
     _LOGGER.error(
         "Marking persistent_notification read failed: "
-        "Notification ID %s not found.",
+        "Notification ID %s not found",
         notification_id,
     )
     return

@@ -119,7 +119,7 @@ class PlaatoSensor(Entity):
 """Return the state of the sensor."""
 sensors = self.get_sensors()
 if sensors is False:
-    _LOGGER.debug("Device with name %s has no sensors.", self.name)
+    _LOGGER.debug("Device with name %s has no sensors", self.name)
     return 0

 if self._type == ATTR_ABV:

@@ -180,7 +180,7 @@ class PlexServer:
 f"hostname '{domain}' doesn't match"
 ):
     _LOGGER.warning(
-        "Plex SSL certificate's hostname changed, updating."
+        "Plex SSL certificate's hostname changed, updating"
     )
     if _update_plexdirect_hostname():
         config_entry_update_needed = True

@@ -199,7 +199,7 @@ class PlexServer:
 system_accounts = self._plex_server.systemAccounts()
 except Unauthorized:
     _LOGGER.warning(
-        "Plex account has limited permissions, shared account filtering will not be available."
+        "Plex account has limited permissions, shared account filtering will not be available"
     )
 else:
     self._accounts = [

@@ -37,7 +37,7 @@ class PoolSenseConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):

 self._email = user_input[CONF_EMAIL]
 self._password = user_input[CONF_PASSWORD]
-_LOGGER.debug("Configuring user: %s - Password hidden.", self._email)
+_LOGGER.debug("Configuring user: %s - Password hidden", self._email)

 poolsense = PoolSense()
 api_key_valid = await poolsense.test_poolsense_credentials(

@@ -51,10 +51,10 @@ class QuantumGatewayDeviceScanner(DeviceScanner):
 self.success_init = self.quantum.success_init
 except RequestException:
     self.success_init = False
-    _LOGGER.error("Unable to connect to gateway. Check host.")
+    _LOGGER.error("Unable to connect to gateway. Check host")

 if not self.success_init:
-    _LOGGER.error("Unable to login to gateway. Check password and host.")
+    _LOGGER.error("Unable to login to gateway. Check password and host")

 def scan_devices(self):
     """Scan for new devices and return a list of found MACs."""
@@ -421,7 +421,7 @@ class Recorder(threading.Thread):
 except Exception as err:  # pylint: disable=broad-except
     # Must catch the exception to prevent the loop from collapsing
     _LOGGER.error(
-        "Error in database connectivity during keepalive: %s.", err,
+        "Error in database connectivity during keepalive: %s", err,
     )
     self._reopen_event_session()

@@ -165,7 +165,7 @@ def _drop_index(engine, table_name, index_name):
 _LOGGER.warning(
     "Failed to drop index %s from table %s. Schema "
     "Migration will continue; this is not a "
-    "critical operation.",
+    "critical operation",
     index_name,
     table_name,
 )

@@ -195,7 +195,7 @@ def _add_columns(engine, table_name, columns_def):
 except (InternalError, OperationalError):
     # Some engines support adding all columns at once,
     # this error is when they don't
-    _LOGGER.info("Unable to use quick column add. Adding 1 by 1.")
+    _LOGGER.info("Unable to use quick column add. Adding 1 by 1")

     for column_def in columns_def:
         try:

@@ -59,7 +59,7 @@ def purge_old_data(instance, purge_days: int, repack: bool) -> bool:
 # If states or events purging isn't processing the purge_before yet,
 # return false, as we are not done yet.
 if batch_purge_before != purge_before:
-    _LOGGER.debug("Purging hasn't fully completed yet.")
+    _LOGGER.debug("Purging hasn't fully completed yet")
     return False

 # Recorder runs is small, no need to batch run it

@@ -94,7 +94,7 @@ def purge_old_data(instance, purge_days: int, repack: bool) -> bool:
 time.sleep(instance.db_retry_wait)
 return False

-_LOGGER.warning("Error purging history: %s.", err)
+_LOGGER.warning("Error purging history: %s", err)
 except SQLAlchemyError as err:
-    _LOGGER.warning("Error purging history: %s.", err)
+    _LOGGER.warning("Error purging history: %s", err)
 return True

@@ -216,7 +216,7 @@ class RestSensor(Entity):
 _LOGGER.debug("JSON converted from XML: %s", value)
 except ExpatError:
     _LOGGER.warning(
-        "REST xml result could not be parsed and converted to JSON."
+        "REST xml result could not be parsed and converted to JSON"
     )
     _LOGGER.debug("Erroneous XML: %s", value)

@@ -122,22 +122,22 @@ async def async_setup(hass, config):

 if response.status < HTTP_BAD_REQUEST:
     _LOGGER.debug(
-        "Success. Url: %s. Status code: %d.",
+        "Success. Url: %s. Status code: %d",
         response.url,
         response.status,
     )
 else:
     _LOGGER.warning(
-        "Error. Url: %s. Status code %d.",
+        "Error. Url: %s. Status code %d",
         response.url,
         response.status,
     )

 except asyncio.TimeoutError:
-    _LOGGER.warning("Timeout call %s.", response.url, exc_info=1)
+    _LOGGER.warning("Timeout call %s", response.url, exc_info=1)

 except aiohttp.ClientError:
-    _LOGGER.error("Client error %s.", request_url, exc_info=1)
+    _LOGGER.error("Client error %s", request_url, exc_info=1)

 # register services
 hass.services.async_register(DOMAIN, name, async_service_handler)
@@ -231,7 +231,7 @@ async def _async_process_config(hass, config, component):
 entity_id = ENTITY_ID_FORMAT.format(service.service)
 script_entity = component.get_entity(entity_id)
 if script_entity.script.is_legacy and script_entity.is_on:
-    _LOGGER.warning("Script %s already running.", entity_id)
+    _LOGGER.warning("Script %s already running", entity_id)
     return
 await script_entity.async_turn_on(
     variables=service.data, context=service.context

@@ -101,7 +101,7 @@ async def async_setup_platform(hass, config, async_add_entities, discovery_info=
 asyncio.TimeoutError,
 pysensibo.SensiboError,
 ):
-    _LOGGER.exception("Failed to connect to Sensibo servers.")
+    _LOGGER.exception("Failed to connect to Sensibo servers")
     raise PlatformNotReady

 if not devices:

@@ -398,5 +398,5 @@ class SensiboClimate(ClimateEntity):
 data = await self._client.async_get_device(self._id, _FETCH_FIELDS)
 self._do_update(data)
 except (aiohttp.client_exceptions.ClientError, pysensibo.SensiboError):
-    _LOGGER.warning("Failed to connect to Sensibo servers.")
+    _LOGGER.warning("Failed to connect to Sensibo servers")
     self._available = False

@@ -65,7 +65,7 @@ class SignalNotificationService(BaseNotificationService):
 filenames = data[ATTR_FILENAMES]
 if ATTR_FILENAME in data:
     _LOGGER.warning(
-        "The 'attachment' option is deprecated, please replace it with 'attachments'. This option will become invalid in version 0.108."
+        "The 'attachment' option is deprecated, please replace it with 'attachments'. This option will become invalid in version 0.108"
     )
     if filenames is None:
         filenames = [data[ATTR_FILENAME]]

@@ -524,7 +524,7 @@ class SimpliSafe:
 if isinstance(result, InvalidCredentialsError):
     if self._emergency_refresh_token_used:
         _LOGGER.error(
-            "SimpliSafe authentication disconnected. Please restart HASS."
+            "SimpliSafe authentication disconnected. Please restart HASS"
        )
         remove_listener = self._hass.data[DOMAIN][DATA_LISTENER].pop(
             self._config_entry.entry_id

@@ -138,7 +138,7 @@ class MailNotificationService(BaseNotificationService):
 except (smtplib.socket.gaierror, ConnectionRefusedError):
     _LOGGER.exception(
         "SMTP server not found or refused connection (%s:%s). "
-        "Please check the IP address, hostname, and availability of your SMTP server.",
+        "Please check the IP address, hostname, and availability of your SMTP server",
         self._server,
         self._port,
     )

@@ -161,7 +161,7 @@ async def async_setup(hass, config):
 "Received unknown intent %s", request["intent"]["intentName"]
 )
 except intent.IntentError:
-    _LOGGER.exception("Error while handling intent: %s.", intent_type)
+    _LOGGER.exception("Error while handling intent: %s", intent_type)

 await hass.components.mqtt.async_subscribe(INTENT_TOPIC, message_received)

@@ -57,7 +57,7 @@ async def async_setup_platform(
 ) -> None:
     """Set up from legacy configuration file. Obsolete."""
     _LOGGER.error(
-        "Configuring Songpal through media_player platform is no longer supported. Convert to songpal platform or UI configuration."
+        "Configuring Songpal through media_player platform is no longer supported. Convert to songpal platform or UI configuration"
     )

@@ -75,7 +75,7 @@ async def async_setup_entry(
 ): # set timeout to avoid blocking the setup process
     await device.get_supported_methods()
 except (SongpalException, asyncio.TimeoutError) as ex:
-    _LOGGER.warning("[%s(%s)] Unable to connect.", name, endpoint)
+    _LOGGER.warning("[%s(%s)] Unable to connect", name, endpoint)
     _LOGGER.debug("Unable to get methods from songpal: %s", ex)
     raise PlatformNotReady

@@ -128,7 +128,7 @@ class SongpalEntity(MediaPlayerEntity):

 async def async_activate_websocket(self):
     """Activate websocket for listening if wanted."""
-    _LOGGER.info("Activating websocket connection..")
+    _LOGGER.info("Activating websocket connection")

     async def _volume_changed(volume: VolumeChange):
         _LOGGER.debug("Volume changed: %s", volume)

@@ -152,7 +152,7 @@ class SongpalEntity(MediaPlayerEntity):

 async def _try_reconnect(connect: ConnectChange):
     _LOGGER.warning(
-        "[%s(%s)] Got disconnected, trying to reconnect.",
+        "[%s(%s)] Got disconnected, trying to reconnect",
         self.name,
         self._dev.endpoint,
     )

@@ -179,7 +179,7 @@ class SongpalEntity(MediaPlayerEntity):

 self.hass.loop.create_task(self._dev.listen_notifications())
 _LOGGER.warning(
-    "[%s(%s)] Connection reestablished.", self.name, self._dev.endpoint
+    "[%s(%s)] Connection reestablished", self.name, self._dev.endpoint
 )

 self._dev.on_notification(VolumeChange, _volume_changed)