2019-02-13 20:21:14 +00:00
|
|
|
"""Support for testing internet speed via Speedtest.net."""
|
2019-04-04 06:31:55 +00:00
|
|
|
from datetime import timedelta
|
2019-10-15 23:20:59 +00:00
|
|
|
import logging
|
2019-02-04 08:47:04 +00:00
|
|
|
|
2019-10-15 23:20:59 +00:00
|
|
|
import speedtest
|
2019-02-04 08:47:04 +00:00
|
|
|
import voluptuous as vol
|
|
|
|
|
2020-06-10 16:33:48 +00:00
|
|
|
from homeassistant.config_entries import SOURCE_IMPORT
|
2020-07-28 05:57:36 +00:00
|
|
|
from homeassistant.const import (
|
|
|
|
CONF_MONITORED_CONDITIONS,
|
|
|
|
CONF_SCAN_INTERVAL,
|
|
|
|
EVENT_HOMEASSISTANT_STARTED,
|
|
|
|
)
|
2020-08-20 00:32:01 +00:00
|
|
|
from homeassistant.core import CoreState, callback
|
2020-06-10 16:33:48 +00:00
|
|
|
from homeassistant.exceptions import ConfigEntryNotReady
|
2019-10-15 23:20:59 +00:00
|
|
|
import homeassistant.helpers.config_validation as cv
|
2020-06-10 16:33:48 +00:00
|
|
|
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
|
|
|
|
|
|
|
|
from .const import (
|
|
|
|
CONF_MANUAL,
|
|
|
|
CONF_SERVER_ID,
|
|
|
|
DEFAULT_SCAN_INTERVAL,
|
|
|
|
DEFAULT_SERVER,
|
|
|
|
DOMAIN,
|
|
|
|
SENSOR_TYPES,
|
|
|
|
SPEED_TEST_SERVICE,
|
|
|
|
)
|
2019-03-21 05:56:46 +00:00
|
|
|
|
2019-02-04 08:47:04 +00:00
|
|
|
_LOGGER = logging.getLogger(__name__)


# YAML configuration schema. YAML setup is deprecated (see cv.deprecated
# below); anything found under DOMAIN is imported into a config entry by
# async_setup.
CONFIG_SCHEMA = vol.Schema(
    vol.All(
        # Deprecated in Home Assistant 2021.6
        cv.deprecated(DOMAIN),
        {
            DOMAIN: vol.Schema(
                {
                    # Specific speedtest.net server to test against.
                    vol.Optional(CONF_SERVER_ID): cv.positive_int,
                    vol.Optional(
                        CONF_SCAN_INTERVAL,
                        default=timedelta(minutes=DEFAULT_SCAN_INTERVAL),
                    ): cv.positive_time_period,
                    # When True, speed tests only run on demand (service call).
                    vol.Optional(CONF_MANUAL, default=False): cv.boolean,
                    vol.Optional(
                        CONF_MONITORED_CONDITIONS, default=list(SENSOR_TYPES)
                    ): vol.All(cv.ensure_list, [vol.In(list(SENSOR_TYPES))]),
                }
            )
        },
    ),
    extra=vol.ALLOW_EXTRA,
)

# Entity platforms forwarded for each config entry.
PLATFORMS = ["sensor"]
|
|
|
|
|
2019-02-04 08:47:04 +00:00
|
|
|
|
2020-06-10 16:33:48 +00:00
|
|
|
def server_id_valid(server_id):
    """Return True if *server_id* refers to a known speedtest.net server.

    Performs a live lookup against speedtest.net, so this does network I/O.
    """
    try:
        client = speedtest.Speedtest()
        client.get_servers([int(server_id)])
    except (speedtest.ConfigRetrievalError, speedtest.NoMatchedServers):
        # Either the server catalogue could not be fetched or the id is
        # not in it — treat both as "not valid".
        return False
    else:
        return True
|
|
|
|
|
|
|
|
|
2019-02-04 08:47:04 +00:00
|
|
|
async def async_setup(hass, config):
    """Import integration from config.

    YAML configuration is deprecated; if present, hand it off to the
    config-entry import flow.
    """
    if DOMAIN not in config:
        return True

    hass.async_create_task(
        hass.config_entries.flow.async_init(
            DOMAIN, context={"source": SOURCE_IMPORT}, data=config[DOMAIN]
        )
    )
    return True
|
2019-02-04 08:47:04 +00:00
|
|
|
|
|
|
|
|
2020-06-10 16:33:48 +00:00
|
|
|
async def async_setup_entry(hass, config_entry):
    """Set up the Speedtest.net component."""
    coordinator = SpeedTestDataCoordinator(hass, config_entry)
    # Creates the speedtest API client, registers the service and the
    # options-update listener; raises ConfigEntryNotReady on failure.
    await coordinator.async_setup()

    async def _enable_scheduled_speedtests(*_):
        """Activate the data update coordinator."""
        # Setting update_interval enables the coordinator's periodic refresh.
        coordinator.update_interval = timedelta(
            minutes=config_entry.options.get(CONF_SCAN_INTERVAL, DEFAULT_SCAN_INTERVAL)
        )
        await coordinator.async_refresh()

    # Manual mode leaves update_interval unset, so no scheduled tests run.
    if not config_entry.options[CONF_MANUAL]:
        if hass.state == CoreState.running:
            await _enable_scheduled_speedtests()
            if not coordinator.last_update_success:
                # First refresh failed — ask HA to retry the setup later.
                raise ConfigEntryNotReady
        else:
            # Running a speed test during startup can prevent
            # integrations from being able to setup because it
            # can saturate the network interface.
            hass.bus.async_listen_once(
                EVENT_HOMEASSISTANT_STARTED, _enable_scheduled_speedtests
            )

    # Single coordinator stored directly under DOMAIN (single-instance setup).
    hass.data[DOMAIN] = coordinator

    hass.config_entries.async_setup_platforms(config_entry, PLATFORMS)

    return True
|
|
|
|
|
|
|
|
|
2020-06-10 16:33:48 +00:00
|
|
|
async def async_unload_entry(hass, config_entry):
    """Unload SpeedTest Entry from config_entry."""
    # Remove the on-demand speed test service registered during setup.
    hass.services.async_remove(DOMAIN, SPEED_TEST_SERVICE)

    # Detach the coordinator's options-update listener.
    coordinator = hass.data[DOMAIN]
    coordinator.async_unload()

    unloaded = await hass.config_entries.async_unload_platforms(config_entry, PLATFORMS)
    if unloaded:
        hass.data.pop(DOMAIN)
    return unloaded
|
2020-06-10 16:33:48 +00:00
|
|
|
|
|
|
|
|
|
|
|
class SpeedTestDataCoordinator(DataUpdateCoordinator):
    """Get the latest data from speedtest.net."""

    def __init__(self, hass, config_entry):
        """Initialize the data object."""
        self.hass = hass
        self.config_entry = config_entry
        # speedtest.Speedtest client; created later in async_setup because
        # its constructor does blocking network I/O.
        self.api = None
        # Display-name -> server-info mapping built by update_servers.
        self.servers = {}
        # Unsubscribe callback for the config-entry update listener.
        self._unsub_update_listener = None
        # No update_interval here: scheduled refreshes are enabled (or not)
        # by the setup code depending on the "manual" option.
        super().__init__(
            self.hass,
            _LOGGER,
            name=DOMAIN,
            update_method=self.async_update,
        )

    def update_servers(self):
        """Update list of test servers."""
        # Blocking call — only invoke from an executor job.
        try:
            server_list = self.api.get_servers()
        except speedtest.ConfigRetrievalError:
            # Best effort: keep the previous server list on failure.
            _LOGGER.debug("Error retrieving server list")
            return

        self.servers[DEFAULT_SERVER] = {}
        # server_list maps distance -> list of server dicts; sort by
        # country+sponsor for a stable, readable ordering.
        for server in sorted(
            server_list.values(),
            key=lambda server: server[0]["country"] + server[0]["sponsor"],
        ):
            self.servers[
                f"{server[0]['country']} - {server[0]['sponsor']} - {server[0]['name']}"
            ] = server[0]

    def update_data(self):
        """Get the latest data from speedtest.net."""
        # Blocking; runs in the executor via async_update.
        self.update_servers()

        # Drop any previously cached closest servers so the selection below
        # starts fresh.
        self.api.closest.clear()
        if self.config_entry.options.get(CONF_SERVER_ID):
            # Pin the test to the user-selected server.
            server_id = self.config_entry.options.get(CONF_SERVER_ID)
            self.api.get_servers(servers=[server_id])

        try:
            self.api.get_best_server()
        except speedtest.SpeedtestBestServerFailure as err:
            # NOTE(review): passing err as a second positional arg to
            # UpdateFailed is redundant with the `from err` chaining —
            # kept as-is to preserve the exception's args tuple.
            raise UpdateFailed(
                "Failed to retrieve best server for speedtest", err
            ) from err

        _LOGGER.debug(
            "Executing speedtest.net speed test with server_id: %s",
            self.api.best["id"],
        )
        self.api.download()
        self.api.upload()
        return self.api.results.dict()

    async def async_update(self, *_):
        """Update Speedtest data."""
        try:
            # Run the blocking test off the event loop.
            return await self.hass.async_add_executor_job(self.update_data)
        except (speedtest.ConfigRetrievalError, speedtest.NoMatchedServers) as err:
            raise UpdateFailed from err

    async def async_set_options(self):
        """Set options for entry."""
        # One-time migration: entries imported from YAML carry their
        # settings in `data`; move them into `options`.
        if not self.config_entry.options:
            data = {**self.config_entry.data}
            options = {
                CONF_SCAN_INTERVAL: data.pop(CONF_SCAN_INTERVAL, DEFAULT_SCAN_INTERVAL),
                CONF_MANUAL: data.pop(CONF_MANUAL, False),
                CONF_SERVER_ID: str(data.pop(CONF_SERVER_ID, "")),
            }
            self.hass.config_entries.async_update_entry(
                self.config_entry, data=data, options=options
            )

    async def async_setup(self):
        """Set up SpeedTest."""
        try:
            # Constructor fetches speedtest.net config — run in executor.
            self.api = await self.hass.async_add_executor_job(speedtest.Speedtest)
        except speedtest.ConfigRetrievalError as err:
            raise ConfigEntryNotReady from err

        async def request_update(call):
            """Request update."""
            await self.async_request_refresh()

        await self.async_set_options()

        # Prime the server list so the options flow can offer choices.
        await self.hass.async_add_executor_job(self.update_servers)

        # On-demand speed test service.
        self.hass.services.async_register(DOMAIN, SPEED_TEST_SERVICE, request_update)

        self._unsub_update_listener = self.config_entry.add_update_listener(
            options_updated_listener
        )

    @callback
    def async_unload(self):
        """Unload the coordinator."""
        # Idempotent: safe to call when the listener was never registered
        # or was already removed.
        if not self._unsub_update_listener:
            return
        self._unsub_update_listener()
        self._unsub_update_listener = None
|
2020-06-10 16:33:48 +00:00
|
|
|
|
|
|
|
|
|
|
|
async def options_updated_listener(hass, entry):
    """Handle options update."""
    coordinator = hass.data[DOMAIN]

    if entry.options[CONF_MANUAL]:
        # Manual mode: disable the scheduled update cycle entirely.
        coordinator.update_interval = None
        return

    # Apply the (possibly changed) interval and refresh immediately.
    coordinator.update_interval = timedelta(minutes=entry.options[CONF_SCAN_INTERVAL])
    await coordinator.async_request_refresh()
|