"""Support for IQVIA."""
import asyncio
from datetime import timedelta
import logging

from pyiqvia import Client
from pyiqvia.errors import IQVIAError, InvalidZipError
import voluptuous as vol

from homeassistant.config_entries import SOURCE_IMPORT
from homeassistant.const import ATTR_ATTRIBUTION, CONF_MONITORED_CONDITIONS
from homeassistant.core import callback
from homeassistant.helpers import aiohttp_client, config_validation as cv
from homeassistant.helpers.dispatcher import (
    async_dispatcher_connect, async_dispatcher_send)
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.event import async_track_time_interval
from homeassistant.util.decorator import Registry

from .config_flow import configured_instances
from .const import (
    CONF_ZIP_CODE, DATA_CLIENT, DATA_LISTENER, DOMAIN, SENSORS,
    TOPIC_DATA_UPDATE, TYPE_ALLERGY_FORECAST, TYPE_ALLERGY_INDEX,
    TYPE_ALLERGY_OUTLOOK, TYPE_ALLERGY_TODAY, TYPE_ALLERGY_TOMORROW,
    TYPE_ASTHMA_FORECAST, TYPE_ASTHMA_INDEX, TYPE_ASTHMA_TODAY,
    TYPE_ASTHMA_TOMORROW, TYPE_DISEASE_FORECAST, TYPE_DISEASE_INDEX,
    TYPE_DISEASE_TODAY)
# Module-level logger for this integration.
_LOGGER = logging.getLogger(__name__)

# Storage key for raw configuration data; not referenced in this module —
# presumably consumed elsewhere in the integration (TODO confirm).
DATA_CONFIG = 'config'

# Attribution string attached to every entity's state attributes.
DEFAULT_ATTRIBUTION = 'Data provided by IQVIA™'

# Interval between periodic data refreshes (see async_setup_entry).
DEFAULT_SCAN_INTERVAL = timedelta(minutes=30)
# Maps a tuple of monitored sensor types to the tuple of fetcher types
# whose data those sensors need; IQVIAData.async_update uses this to
# request only the endpoints required by the configured sensors.
FETCHER_MAPPING = {
    (TYPE_ALLERGY_FORECAST,): (TYPE_ALLERGY_FORECAST, TYPE_ALLERGY_OUTLOOK),
    (TYPE_ALLERGY_TODAY, TYPE_ALLERGY_TOMORROW): (TYPE_ALLERGY_INDEX,),
    (TYPE_ASTHMA_FORECAST,): (TYPE_ASTHMA_FORECAST,),
    (TYPE_ASTHMA_TODAY, TYPE_ASTHMA_TOMORROW): (TYPE_ASTHMA_INDEX,),
    (TYPE_DISEASE_FORECAST,): (TYPE_DISEASE_FORECAST,),
    (TYPE_DISEASE_TODAY,): (TYPE_DISEASE_INDEX,),
}
# YAML configuration schema: a ZIP code is required; monitored conditions
# default to every known sensor type. Extra top-level keys are allowed so
# unrelated domains in the same file don't fail validation.
CONFIG_SCHEMA = vol.Schema({
    DOMAIN: vol.Schema({
        vol.Required(CONF_ZIP_CODE): str,
        vol.Required(CONF_MONITORED_CONDITIONS, default=list(SENSORS)):
            vol.All(cv.ensure_list, [vol.In(SENSORS)]),
    })
}, extra=vol.ALLOW_EXTRA)
async def async_setup(hass, config):
    """Set up the IQVIA component.

    Initializes the shared per-domain storage and, when YAML
    configuration is present, imports it into a config entry — unless an
    entry for the same ZIP code already exists.
    """
    hass.data[DOMAIN] = {DATA_CLIENT: {}, DATA_LISTENER: {}}

    if DOMAIN in config:
        domain_conf = config[DOMAIN]

        # Only start an import flow if this ZIP code isn't configured yet.
        if domain_conf[CONF_ZIP_CODE] not in configured_instances(hass):
            hass.async_create_task(
                hass.config_entries.flow.async_init(
                    DOMAIN, context={'source': SOURCE_IMPORT},
                    data=domain_conf))

    return True
async def async_setup_entry(hass, config_entry):
    """Set up IQVIA as config entry.

    Builds the shared IQVIAData object, performs an initial refresh,
    forwards setup to the sensor platform and schedules periodic updates.
    Returns False if the initial refresh fails.
    """
    session = aiohttp_client.async_get_clientsession(hass)
    entry_data = config_entry.data

    try:
        api_client = Client(entry_data[CONF_ZIP_CODE], session)
        monitored = entry_data.get(CONF_MONITORED_CONDITIONS, list(SENSORS))
        iqvia = IQVIAData(api_client, monitored)
        # Prime the data before any entities are created.
        await iqvia.async_update()
    except IQVIAError as err:
        _LOGGER.error('Unable to set up IQVIA: %s', err)
        return False

    hass.data[DOMAIN][DATA_CLIENT][config_entry.entry_id] = iqvia

    hass.async_create_task(
        hass.config_entries.async_forward_entry_setup(config_entry, 'sensor'))

    async def refresh(event_time):
        """Refresh IQVIA data."""
        _LOGGER.debug('Updating IQVIA data')
        await iqvia.async_update()
        # Tell every entity that fresh data is available.
        async_dispatcher_send(hass, TOPIC_DATA_UPDATE)

    # Keep the cancel callback so async_unload_entry can stop the timer.
    cancel_refresh = async_track_time_interval(
        hass, refresh, DEFAULT_SCAN_INTERVAL)
    hass.data[DOMAIN][DATA_LISTENER][config_entry.entry_id] = cancel_refresh

    return True
async def async_unload_entry(hass, config_entry):
    """Unload an IQVIA config entry.

    Drops the shared data object, cancels the periodic refresh listener,
    and unloads the sensor platform. (Docstring previously said "OpenUV" —
    a copy-paste leftover from another integration.)
    """
    # Remove this entry's shared IQVIAData object.
    hass.data[DOMAIN][DATA_CLIENT].pop(config_entry.entry_id)

    # Cancel the periodic refresh registered in async_setup_entry.
    remove_listener = hass.data[DOMAIN][DATA_LISTENER].pop(
        config_entry.entry_id)
    remove_listener()

    # Tear down the sensor entities created for this entry.
    await hass.config_entries.async_forward_entry_unload(
        config_entry, 'sensor')

    return True
class IQVIAData:
    """Define a data object to retrieve info from IQVIA.

    Holds the latest payload per fetcher type in ``self.data`` and knows
    which endpoints to poll based on the configured sensor types.
    """

    def __init__(self, client, sensor_types):
        """Initialize."""
        self._client = client
        self.data = {}
        self.sensor_types = sensor_types
        self.zip_code = client.zip_code

        # Register one fetcher coroutine per data type, table-driven.
        self.fetchers = Registry()
        for fetcher_type, api_coro in (
                (TYPE_ALLERGY_FORECAST, client.allergens.extended),
                (TYPE_ALLERGY_OUTLOOK, client.allergens.outlook),
                (TYPE_ALLERGY_INDEX, client.allergens.current),
                (TYPE_ASTHMA_FORECAST, client.asthma.extended),
                (TYPE_ASTHMA_INDEX, client.asthma.current),
                (TYPE_DISEASE_FORECAST, client.disease.extended),
                (TYPE_DISEASE_INDEX, client.disease.current)):
            self.fetchers.register(fetcher_type)(api_coro)

    async def async_update(self):
        """Update IQVIA data."""
        # Collect only the fetcher types needed by the monitored sensors.
        needed = [
            fetcher_type
            for conditions, fetcher_types in FETCHER_MAPPING.items()
            if any(condition in self.sensor_types
                   for condition in conditions)
            for fetcher_type in fetcher_types
        ]
        tasks = {
            fetcher_type: self.fetchers[fetcher_type]()
            for fetcher_type in needed
        }

        results = await asyncio.gather(*tasks.values(), return_exceptions=True)

        # IQVIA sites require a bit more complicated error handling, given
        # that they sometimes have parts (but not the whole thing) go down:
        # 1. If `InvalidZipError` is thrown, quit everything immediately.
        # 2. If a single request throws any other error, try the others.
        for data_type, outcome in zip(tasks, results):
            if isinstance(outcome, InvalidZipError):
                _LOGGER.error("No data for ZIP: %s", self._client.zip_code)
                self.data = {}
                return

            if isinstance(outcome, IQVIAError):
                _LOGGER.error('Unable to get %s data: %s', data_type, outcome)
                self.data[data_type] = {}
                continue

            _LOGGER.debug('Loaded new %s data', data_type)
            self.data[data_type] = outcome
class IQVIAEntity(Entity):
    """Define a base IQVIA entity."""

    def __init__(self, iqvia, sensor_type, name, icon, zip_code):
        """Initialize the sensor."""
        self._iqvia = iqvia
        self._type = sensor_type
        self._name = name
        self._icon = icon
        self._zip_code = zip_code
        self._async_unsub_dispatcher_connect = None
        self._attrs = {ATTR_ATTRIBUTION: DEFAULT_ATTRIBUTION}
        self._state = None

    @property
    def available(self):
        """Return True if entity is available."""
        # Today/tomorrow sensors derive from the corresponding index
        # payload; everything else maps directly to its own data type.
        index_for_type = {
            TYPE_ALLERGY_TODAY: TYPE_ALLERGY_INDEX,
            TYPE_ALLERGY_TOMORROW: TYPE_ALLERGY_INDEX,
            TYPE_ASTHMA_TODAY: TYPE_ASTHMA_INDEX,
            TYPE_ASTHMA_TOMORROW: TYPE_ASTHMA_INDEX,
            TYPE_DISEASE_TODAY: TYPE_DISEASE_INDEX,
        }
        source = index_for_type.get(self._type, self._type)
        return self._iqvia.data.get(source) is not None

    @property
    def device_state_attributes(self):
        """Return the device state attributes."""
        return self._attrs

    @property
    def icon(self):
        """Return the icon."""
        return self._icon

    @property
    def name(self):
        """Return the name."""
        return self._name

    @property
    def state(self):
        """Return the state."""
        return self._state

    @property
    def unique_id(self):
        """Return a unique, HASS-friendly identifier for this entity."""
        return '{0}_{1}'.format(self._zip_code, self._type)

    @property
    def unit_of_measurement(self):
        """Return the unit the value is expressed in."""
        return 'index'

    async def async_added_to_hass(self):
        """Register callbacks."""
        @callback
        def dispatch_update():
            """Schedule an HA state refresh when new data arrives."""
            self.async_schedule_update_ha_state(True)

        self._async_unsub_dispatcher_connect = async_dispatcher_connect(
            self.hass, TOPIC_DATA_UPDATE, dispatch_update)

    async def async_will_remove_from_hass(self):
        """Disconnect dispatcher listener when removed."""
        if self._async_unsub_dispatcher_connect:
            self._async_unsub_dispatcher_connect()