2019-02-13 20:21:14 +00:00
|
|
|
"""Support for Z-Wave."""
|
2017-02-23 21:06:28 +00:00
|
|
|
import asyncio
|
2017-03-14 23:55:33 +00:00
|
|
|
import copy
|
2019-04-12 16:22:56 +00:00
|
|
|
from importlib import import_module
|
2016-03-27 18:29:39 +00:00
|
|
|
import logging
|
2015-02-26 07:27:17 +00:00
|
|
|
from pprint import pprint
|
2016-10-05 12:40:08 +00:00
|
|
|
|
2016-08-21 21:36:44 +00:00
|
|
|
import voluptuous as vol
|
2016-02-19 05:27:50 +00:00
|
|
|
|
2018-10-09 14:30:55 +00:00
|
|
|
from homeassistant import config_entries
|
2018-05-08 19:30:28 +00:00
|
|
|
from homeassistant.core import callback, CoreState
|
2017-02-13 07:55:27 +00:00
|
|
|
from homeassistant.helpers import discovery
|
2017-06-16 17:07:17 +00:00
|
|
|
from homeassistant.helpers.entity import generate_entity_id
|
2019-06-03 16:40:40 +00:00
|
|
|
from homeassistant.helpers.entity_component import DEFAULT_SCAN_INTERVAL
|
|
|
|
from homeassistant.helpers.entity_platform import EntityPlatform
|
2018-05-12 21:45:36 +00:00
|
|
|
from homeassistant.helpers.entity_registry import async_get_registry
|
2015-02-23 01:36:28 +00:00
|
|
|
from homeassistant.const import (
|
2017-03-23 15:37:20 +00:00
|
|
|
ATTR_ENTITY_ID, EVENT_HOMEASSISTANT_START, EVENT_HOMEASSISTANT_STOP)
|
2017-02-16 23:19:22 +00:00
|
|
|
from homeassistant.helpers.entity_values import EntityValues
|
2018-05-12 21:45:36 +00:00
|
|
|
from homeassistant.helpers.event import async_track_time_change
|
2018-03-11 14:30:03 +00:00
|
|
|
from homeassistant.util import convert
|
2018-01-24 08:27:58 +00:00
|
|
|
import homeassistant.util.dt as dt_util
|
2016-08-21 21:36:44 +00:00
|
|
|
import homeassistant.helpers.config_validation as cv
|
2017-03-04 17:13:24 +00:00
|
|
|
from homeassistant.helpers.dispatcher import (
|
|
|
|
async_dispatcher_connect, async_dispatcher_send)
|
2017-02-16 23:19:22 +00:00
|
|
|
|
2016-09-30 15:43:18 +00:00
|
|
|
from . import const
|
2018-10-25 20:15:20 +00:00
|
|
|
from . import config_flow # noqa pylint: disable=unused-import
|
2019-07-11 02:50:43 +00:00
|
|
|
from . import websocket_api as wsapi
|
2018-10-09 14:30:55 +00:00
|
|
|
from .const import (
|
|
|
|
CONF_AUTOHEAL, CONF_DEBUG, CONF_POLLING_INTERVAL,
|
|
|
|
CONF_USB_STICK_PATH, CONF_CONFIG_PATH, CONF_NETWORK_KEY,
|
|
|
|
DEFAULT_CONF_AUTOHEAL, DEFAULT_CONF_USB_STICK_PATH,
|
|
|
|
DEFAULT_POLLING_INTERVAL, DEFAULT_DEBUG, DOMAIN,
|
|
|
|
DATA_DEVICES, DATA_NETWORK, DATA_ENTITY_VALUES)
|
2017-03-23 15:37:20 +00:00
|
|
|
from .node_entity import ZWaveBaseEntity, ZWaveNodeEntity
|
2017-02-08 04:37:11 +00:00
|
|
|
from . import workaround
|
2017-03-14 23:55:33 +00:00
|
|
|
from .discovery_schemas import DISCOVERY_SCHEMAS
|
2019-07-07 05:36:57 +00:00
|
|
|
from .util import (
|
|
|
|
check_node_schema, check_value_schema, node_name, check_has_unique_id,
|
|
|
|
is_node_parsed, node_device_id_and_name)
|
2015-02-23 01:36:28 +00:00
|
|
|
|
2016-10-05 12:40:08 +00:00
|
|
|
_LOGGER = logging.getLogger(__name__)

CLASS_ID = 'class_id'

ATTR_POWER = 'power_consumption'

# Per-device configuration option keys (used in DEVICE_CONFIG_SCHEMA_ENTRY).
CONF_POLLING_INTENSITY = 'polling_intensity'
CONF_IGNORED = 'ignored'
CONF_INVERT_OPENCLOSE_BUTTONS = 'invert_openclose_buttons'
CONF_INVERT_PERCENT = 'invert_percent'
CONF_REFRESH_VALUE = 'refresh_value'
CONF_REFRESH_DELAY = 'delay'
CONF_DEVICE_CONFIG = 'device_config'
CONF_DEVICE_CONFIG_GLOB = 'device_config_glob'
CONF_DEVICE_CONFIG_DOMAIN = 'device_config_domain'

# Key under which the YAML config for this integration is stored in hass.data.
DATA_ZWAVE_CONFIG = 'zwave_config'

# Defaults for the per-device configuration options above.
DEFAULT_CONF_IGNORED = False
DEFAULT_CONF_INVERT_OPENCLOSE_BUTTONS = False
DEFAULT_CONF_INVERT_PERCENT = False
DEFAULT_CONF_REFRESH_VALUE = False
DEFAULT_CONF_REFRESH_DELAY = 5

# Entity platforms this integration can discover devices for.
SUPPORTED_PLATFORMS = ['binary_sensor', 'climate', 'cover', 'fan',
                       'lock', 'light', 'sensor', 'switch']
|
2018-10-16 12:58:25 +00:00
|
|
|
|
2016-08-21 21:36:44 +00:00
|
|
|
# Voluptuous schemas validating the data payloads of the Z-Wave services.

RENAME_NODE_SCHEMA = vol.Schema({
    vol.Required(const.ATTR_NODE_ID): vol.Coerce(int),
    vol.Required(const.ATTR_NAME): cv.string,
    vol.Optional(const.ATTR_UPDATE_IDS, default=False): cv.boolean,
})

RENAME_VALUE_SCHEMA = vol.Schema({
    vol.Required(const.ATTR_NODE_ID): vol.Coerce(int),
    vol.Required(const.ATTR_VALUE_ID): vol.Coerce(int),
    vol.Required(const.ATTR_NAME): cv.string,
    vol.Optional(const.ATTR_UPDATE_IDS, default=False): cv.boolean,
})

SET_CONFIG_PARAMETER_SCHEMA = vol.Schema({
    vol.Required(const.ATTR_NODE_ID): vol.Coerce(int),
    vol.Required(const.ATTR_CONFIG_PARAMETER): vol.Coerce(int),
    vol.Required(const.ATTR_CONFIG_VALUE): vol.Any(vol.Coerce(int), cv.string),
    vol.Optional(const.ATTR_CONFIG_SIZE, default=2): vol.Coerce(int)
})

SET_NODE_VALUE_SCHEMA = vol.Schema({
    vol.Required(const.ATTR_NODE_ID): vol.Coerce(int),
    vol.Required(const.ATTR_VALUE_ID): vol.Coerce(int),
    vol.Required(const.ATTR_CONFIG_VALUE): vol.Coerce(int)
})

REFRESH_NODE_VALUE_SCHEMA = vol.Schema({
    vol.Required(const.ATTR_NODE_ID): vol.Coerce(int),
    vol.Required(const.ATTR_VALUE_ID): vol.Coerce(int)
})

SET_POLL_INTENSITY_SCHEMA = vol.Schema({
    vol.Required(const.ATTR_NODE_ID): vol.Coerce(int),
    vol.Required(const.ATTR_VALUE_ID): vol.Coerce(int),
    vol.Required(const.ATTR_POLL_INTENSITY): vol.Coerce(int),
})

PRINT_CONFIG_PARAMETER_SCHEMA = vol.Schema({
    vol.Required(const.ATTR_NODE_ID): vol.Coerce(int),
    vol.Required(const.ATTR_CONFIG_PARAMETER): vol.Coerce(int),
})

# Generic schema for services that only need a node id.
NODE_SERVICE_SCHEMA = vol.Schema({
    vol.Required(const.ATTR_NODE_ID): vol.Coerce(int),
})

REFRESH_ENTITY_SCHEMA = vol.Schema({
    vol.Required(ATTR_ENTITY_ID): cv.entity_id,
})

RESET_NODE_METERS_SCHEMA = vol.Schema({
    vol.Required(const.ATTR_NODE_ID): vol.Coerce(int),
    vol.Optional(const.ATTR_INSTANCE, default=1): vol.Coerce(int)
})

CHANGE_ASSOCIATION_SCHEMA = vol.Schema({
    vol.Required(const.ATTR_ASSOCIATION): cv.string,
    vol.Required(const.ATTR_NODE_ID): vol.Coerce(int),
    vol.Required(const.ATTR_TARGET_NODE_ID): vol.Coerce(int),
    vol.Required(const.ATTR_GROUP): vol.Coerce(int),
    vol.Optional(const.ATTR_INSTANCE, default=0x00): vol.Coerce(int)
})

SET_WAKEUP_SCHEMA = vol.Schema({
    vol.Required(const.ATTR_NODE_ID): vol.Coerce(int),
    vol.Required(const.ATTR_CONFIG_VALUE):
        vol.All(vol.Coerce(int), cv.positive_int),
})

HEAL_NODE_SCHEMA = vol.Schema({
    vol.Required(const.ATTR_NODE_ID): vol.Coerce(int),
    vol.Optional(const.ATTR_RETURN_ROUTES, default=False): cv.boolean,
})

TEST_NODE_SCHEMA = vol.Schema({
    vol.Required(const.ATTR_NODE_ID): vol.Coerce(int),
    vol.Optional(const.ATTR_MESSAGES, default=1): cv.positive_int,
})
|
|
|
|
|
|
|
|
|
2017-02-14 05:34:36 +00:00
|
|
|
# Per-device configuration overrides; keyed by entity id, glob pattern or
# domain in CONFIG_SCHEMA below.
DEVICE_CONFIG_SCHEMA_ENTRY = vol.Schema({
    vol.Optional(CONF_POLLING_INTENSITY): cv.positive_int,
    vol.Optional(CONF_IGNORED, default=DEFAULT_CONF_IGNORED): cv.boolean,
    vol.Optional(CONF_INVERT_OPENCLOSE_BUTTONS,
                 default=DEFAULT_CONF_INVERT_OPENCLOSE_BUTTONS): cv.boolean,
    vol.Optional(CONF_INVERT_PERCENT,
                 default=DEFAULT_CONF_INVERT_PERCENT): cv.boolean,
    vol.Optional(CONF_REFRESH_VALUE, default=DEFAULT_CONF_REFRESH_VALUE):
        cv.boolean,
    vol.Optional(CONF_REFRESH_DELAY, default=DEFAULT_CONF_REFRESH_DELAY):
        cv.positive_int
})

# Dispatcher signal (formatted with an entity id) asking an entity to refresh.
SIGNAL_REFRESH_ENTITY_FORMAT = 'zwave_refresh_entity_{}'

CONFIG_SCHEMA = vol.Schema({
    DOMAIN: vol.Schema({
        vol.Optional(CONF_AUTOHEAL, default=DEFAULT_CONF_AUTOHEAL): cv.boolean,
        vol.Optional(CONF_CONFIG_PATH): cv.string,
        # Network key must look like "0x01, 0x02, ... 0x10" (16 bytes).
        vol.Optional(CONF_NETWORK_KEY):
            vol.All(cv.string, vol.Match(r'(0x\w\w,\s?){15}0x\w\w')),
        vol.Optional(CONF_DEVICE_CONFIG, default={}):
            vol.Schema({cv.entity_id: DEVICE_CONFIG_SCHEMA_ENTRY}),
        vol.Optional(CONF_DEVICE_CONFIG_GLOB, default={}):
            vol.Schema({cv.string: DEVICE_CONFIG_SCHEMA_ENTRY}),
        vol.Optional(CONF_DEVICE_CONFIG_DOMAIN, default={}):
            vol.Schema({cv.string: DEVICE_CONFIG_SCHEMA_ENTRY}),
        vol.Optional(CONF_DEBUG, default=DEFAULT_DEBUG): cv.boolean,
        vol.Optional(CONF_POLLING_INTERVAL, default=DEFAULT_POLLING_INTERVAL):
            cv.positive_int,
        vol.Optional(CONF_USB_STICK_PATH): cv.string,
    }),
}, extra=vol.ALLOW_EXTRA)
|
2016-03-27 18:29:39 +00:00
|
|
|
|
2015-02-23 01:36:28 +00:00
|
|
|
|
2015-02-26 07:27:17 +00:00
|
|
|
def _obj_to_dict(obj):
|
2016-03-08 16:55:57 +00:00
|
|
|
"""Convert an object into a hash for debug."""
|
2015-02-26 07:27:17 +00:00
|
|
|
return {key: getattr(obj, key) for key
|
|
|
|
in dir(obj)
|
2018-01-30 22:44:05 +00:00
|
|
|
if key[0] != '_' and not callable(getattr(obj, key))}
|
2015-02-23 01:36:28 +00:00
|
|
|
|
|
|
|
|
2016-02-03 11:52:35 +00:00
|
|
|
def _value_name(value):
    """Return the name of the value.

    Combines the owning node's name with the value's label; stripped so a
    missing label does not leave a trailing space.
    """
    return '{} {}'.format(node_name(value.node), value.label).strip()
|
2016-02-03 11:52:35 +00:00
|
|
|
|
|
|
|
|
2015-02-23 08:01:04 +00:00
|
|
|
def nice_print_node(node):
    """Print a nice formatted node to the output (debug method).

    Dumps the node's public attributes plus a dict of all its values
    through the module logger at INFO level.
    """
    node_dict = _obj_to_dict(node)
    node_dict['values'] = {value_id: _obj_to_dict(value)
                           for value_id, value in node.values.items()}

    _LOGGER.info("FOUND NODE %s \n"
                 "%s", node.product_name, node_dict)
|
2015-02-23 08:01:04 +00:00
|
|
|
|
|
|
|
|
2017-02-12 23:42:09 +00:00
|
|
|
def get_config_value(node, value_index, tries=5):
    """Return the current configuration value for a specific index.

    Returns None when no matching CONFIGURATION value exists or when the
    retry budget is exhausted. Retries up to *tries* times if the node's
    value dict mutates during iteration (RuntimeError) — presumably the
    OpenZWave library updates values from another thread.
    """
    try:
        for value in node.values.values():
            if (value.command_class == const.COMMAND_CLASS_CONFIGURATION
                    and value.index == value_index):
                return value.data
    except RuntimeError:
        # If we get a runtime error the dict has changed while
        # we were looking for a value, just do it again.
        return None if tries <= 0 else get_config_value(
            node, value_index, tries=tries - 1)
    return None
|
2015-10-31 20:23:33 +00:00
|
|
|
|
|
|
|
|
2018-08-24 14:37:30 +00:00
|
|
|
async def async_setup_platform(hass, config, async_add_entities,
                               discovery_info=None):
    """Set up the Z-Wave platform (generic part).

    Called via discovery for each supported entity platform; looks up the
    pre-created device object and hands it to the platform. Returns False
    when the Z-Wave network is not running or the device is unknown.
    """
    if discovery_info is None or DATA_NETWORK not in hass.data:
        return False

    device = hass.data[DATA_DEVICES].get(
        discovery_info[const.DISCOVERY_DEVICE], None)
    if device is None:
        return False

    async_add_entities([device])
    return True
|
|
|
|
|
2017-02-23 21:06:28 +00:00
|
|
|
|
2018-05-12 21:45:36 +00:00
|
|
|
async def async_setup(hass, config):
    """Set up Z-Wave components.

    Stores the YAML configuration in hass.data and, if no config entry
    exists yet, starts an import flow to create one from the YAML values.
    """
    if DOMAIN not in config:
        return True

    conf = config[DOMAIN]
    hass.data[DATA_ZWAVE_CONFIG] = conf

    if not hass.config_entries.async_entries(DOMAIN):
        hass.async_create_task(hass.config_entries.flow.async_init(
            DOMAIN, context={'source': config_entries.SOURCE_IMPORT},
            data={
                CONF_USB_STICK_PATH: conf.get(
                    CONF_USB_STICK_PATH, DEFAULT_CONF_USB_STICK_PATH),
                CONF_NETWORK_KEY: conf.get(CONF_NETWORK_KEY),
            }
        ))

    return True
|
|
|
|
|
|
|
|
|
|
|
|
async def async_setup_entry(hass, config_entry):
|
|
|
|
"""Set up Z-Wave from a config entry.
|
2016-03-08 16:55:57 +00:00
|
|
|
|
2015-02-23 01:36:28 +00:00
|
|
|
Will automatically load components to support devices found on the network.
|
|
|
|
"""
|
2015-03-01 06:49:27 +00:00
|
|
|
from pydispatch import dispatcher
|
2017-04-12 17:09:29 +00:00
|
|
|
# pylint: disable=import-error
|
2015-02-23 01:36:28 +00:00
|
|
|
from openzwave.option import ZWaveOption
|
|
|
|
from openzwave.network import ZWaveNetwork
|
2016-10-16 18:36:06 +00:00
|
|
|
from openzwave.group import ZWaveGroup
|
2015-02-23 01:36:28 +00:00
|
|
|
|
2018-10-09 14:30:55 +00:00
|
|
|
config = {}
|
|
|
|
if DATA_ZWAVE_CONFIG in hass.data:
|
|
|
|
config = hass.data[DATA_ZWAVE_CONFIG]
|
|
|
|
|
2016-02-03 11:52:35 +00:00
|
|
|
# Load configuration
|
2018-10-09 14:30:55 +00:00
|
|
|
use_debug = config.get(CONF_DEBUG, DEFAULT_DEBUG)
|
|
|
|
autoheal = config.get(CONF_AUTOHEAL,
|
|
|
|
DEFAULT_CONF_AUTOHEAL)
|
2017-02-23 21:06:28 +00:00
|
|
|
device_config = EntityValues(
|
2018-10-09 14:30:55 +00:00
|
|
|
config.get(CONF_DEVICE_CONFIG),
|
|
|
|
config.get(CONF_DEVICE_CONFIG_DOMAIN),
|
|
|
|
config.get(CONF_DEVICE_CONFIG_GLOB))
|
2015-02-23 08:01:04 +00:00
|
|
|
|
2019-03-14 19:29:21 +00:00
|
|
|
usb_path = config.get(
|
|
|
|
CONF_USB_STICK_PATH, config_entry.data[CONF_USB_STICK_PATH])
|
|
|
|
|
|
|
|
_LOGGER.info('Z-Wave USB path is %s', usb_path)
|
|
|
|
|
2015-02-23 01:36:28 +00:00
|
|
|
# Setup options
|
|
|
|
options = ZWaveOption(
|
2019-03-14 19:29:21 +00:00
|
|
|
usb_path,
|
2015-11-20 04:35:18 +00:00
|
|
|
user_path=hass.config.config_dir,
|
2018-10-09 14:30:55 +00:00
|
|
|
config_path=config.get(CONF_CONFIG_PATH))
|
2015-02-23 01:36:28 +00:00
|
|
|
|
2015-02-23 08:01:04 +00:00
|
|
|
options.set_console_output(use_debug)
|
2017-05-19 06:49:15 +00:00
|
|
|
|
2019-01-11 01:45:50 +00:00
|
|
|
if config_entry.data.get(CONF_NETWORK_KEY):
|
2018-10-09 14:30:55 +00:00
|
|
|
options.addOption("NetworkKey", config_entry.data[CONF_NETWORK_KEY])
|
2015-02-23 01:36:28 +00:00
|
|
|
|
2018-10-09 14:30:55 +00:00
|
|
|
await hass.async_add_executor_job(options.lock)
|
2017-05-26 04:11:02 +00:00
|
|
|
network = hass.data[DATA_NETWORK] = ZWaveNetwork(options, autostart=False)
|
|
|
|
hass.data[DATA_DEVICES] = {}
|
2017-06-03 06:03:00 +00:00
|
|
|
hass.data[DATA_ENTITY_VALUES] = []
|
2015-02-23 01:36:28 +00:00
|
|
|
|
2019-06-03 16:40:40 +00:00
|
|
|
registry = await async_get_registry(hass)
|
|
|
|
|
2019-07-11 02:50:43 +00:00
|
|
|
wsapi.async_load_websocket_api(hass)
|
|
|
|
|
2017-04-10 20:11:39 +00:00
|
|
|
if use_debug: # pragma: no cover
|
2015-02-26 07:27:17 +00:00
|
|
|
def log_all(signal, value=None):
|
2016-03-08 16:55:57 +00:00
|
|
|
"""Log all the signals."""
|
2015-02-23 08:01:04 +00:00
|
|
|
print("")
|
2015-03-19 02:15:48 +00:00
|
|
|
print("SIGNAL *****", signal)
|
2015-02-26 07:27:17 +00:00
|
|
|
if value and signal in (ZWaveNetwork.SIGNAL_VALUE_CHANGED,
|
2016-07-29 19:56:03 +00:00
|
|
|
ZWaveNetwork.SIGNAL_VALUE_ADDED,
|
|
|
|
ZWaveNetwork.SIGNAL_SCENE_EVENT,
|
2016-08-08 14:52:28 +00:00
|
|
|
ZWaveNetwork.SIGNAL_NODE_EVENT,
|
|
|
|
ZWaveNetwork.SIGNAL_AWAKE_NODES_QUERIED,
|
2018-09-28 17:14:57 +00:00
|
|
|
ZWaveNetwork.SIGNAL_ALL_NODES_QUERIED,
|
|
|
|
ZWaveNetwork
|
|
|
|
.SIGNAL_ALL_NODES_QUERIED_SOME_DEAD):
|
2015-02-26 07:27:17 +00:00
|
|
|
pprint(_obj_to_dict(value))
|
2016-02-03 11:52:35 +00:00
|
|
|
|
2015-02-23 08:01:04 +00:00
|
|
|
print("")
|
2015-02-23 01:36:28 +00:00
|
|
|
|
2015-03-01 06:49:27 +00:00
|
|
|
dispatcher.connect(log_all, weak=False)
|
2015-02-23 01:36:28 +00:00
|
|
|
|
2015-02-26 07:27:17 +00:00
|
|
|
def value_added(node, value):
|
2017-05-02 20:47:20 +00:00
|
|
|
"""Handle new added value to a node on the network."""
|
2017-03-14 23:55:33 +00:00
|
|
|
# Check if this value should be tracked by an existing entity
|
2017-06-03 06:03:00 +00:00
|
|
|
for values in hass.data[DATA_ENTITY_VALUES]:
|
2017-03-14 23:55:33 +00:00
|
|
|
values.check_value(value)
|
2017-03-13 06:35:10 +00:00
|
|
|
|
2017-03-14 23:55:33 +00:00
|
|
|
for schema in DISCOVERY_SCHEMAS:
|
|
|
|
if not check_node_schema(node, schema):
|
|
|
|
continue
|
|
|
|
if not check_value_schema(
|
|
|
|
value,
|
2017-03-23 15:37:20 +00:00
|
|
|
schema[const.DISC_VALUES][const.DISC_PRIMARY]):
|
2017-03-13 06:35:10 +00:00
|
|
|
continue
|
|
|
|
|
2017-03-14 23:55:33 +00:00
|
|
|
values = ZWaveDeviceEntityValues(
|
2018-05-12 21:45:36 +00:00
|
|
|
hass, schema, value, config, device_config, registry)
|
2017-06-03 06:03:00 +00:00
|
|
|
|
|
|
|
# We create a new list and update the reference here so that
|
|
|
|
# the list can be safely iterated over in the main thread
|
|
|
|
new_values = hass.data[DATA_ENTITY_VALUES] + [values]
|
|
|
|
hass.data[DATA_ENTITY_VALUES] = new_values
|
2015-02-23 01:36:28 +00:00
|
|
|
|
2019-06-03 16:40:40 +00:00
|
|
|
platform = EntityPlatform(
|
|
|
|
hass=hass,
|
|
|
|
logger=_LOGGER,
|
|
|
|
domain=DOMAIN,
|
|
|
|
platform_name=DOMAIN,
|
|
|
|
platform=None,
|
|
|
|
scan_interval=DEFAULT_SCAN_INTERVAL,
|
|
|
|
entity_namespace=None,
|
|
|
|
async_entities_added_callback=lambda: None,
|
|
|
|
)
|
|
|
|
platform.config_entry = config_entry
|
2017-03-23 15:37:20 +00:00
|
|
|
|
|
|
|
def node_added(node):
|
2017-05-02 20:47:20 +00:00
|
|
|
"""Handle a new node on the network."""
|
2018-03-11 14:30:03 +00:00
|
|
|
entity = ZWaveNodeEntity(node, network)
|
2018-05-02 20:10:26 +00:00
|
|
|
|
2019-06-03 16:40:40 +00:00
|
|
|
async def _add_node_to_component():
|
2018-11-01 18:38:23 +00:00
|
|
|
if hass.data[DATA_DEVICES].get(entity.unique_id):
|
|
|
|
return
|
|
|
|
|
2018-05-02 20:10:26 +00:00
|
|
|
name = node_name(node)
|
|
|
|
generated_id = generate_entity_id(DOMAIN + '.{}', name, [])
|
|
|
|
node_config = device_config.get(generated_id)
|
|
|
|
if node_config.get(CONF_IGNORED):
|
|
|
|
_LOGGER.info(
|
|
|
|
"Ignoring node entity %s due to device settings",
|
|
|
|
generated_id)
|
|
|
|
return
|
2018-11-01 18:38:23 +00:00
|
|
|
|
|
|
|
hass.data[DATA_DEVICES][entity.unique_id] = entity
|
2019-06-03 16:40:40 +00:00
|
|
|
await platform.async_add_entities([entity])
|
2018-05-02 20:10:26 +00:00
|
|
|
|
|
|
|
if entity.unique_id:
|
2019-06-03 16:40:40 +00:00
|
|
|
hass.async_add_job(_add_node_to_component())
|
2017-03-23 15:37:20 +00:00
|
|
|
return
|
2018-05-02 20:10:26 +00:00
|
|
|
|
2018-05-08 19:30:28 +00:00
|
|
|
@callback
|
|
|
|
def _on_ready(sec):
|
|
|
|
_LOGGER.info("Z-Wave node %d ready after %d seconds",
|
|
|
|
entity.node_id, sec)
|
|
|
|
hass.async_add_job(_add_node_to_component)
|
2018-05-02 20:10:26 +00:00
|
|
|
|
2018-05-08 19:30:28 +00:00
|
|
|
@callback
|
|
|
|
def _on_timeout(sec):
|
|
|
|
_LOGGER.warning(
|
|
|
|
"Z-Wave node %d not ready after %d seconds, "
|
|
|
|
"continuing anyway",
|
|
|
|
entity.node_id, sec)
|
2018-05-02 20:10:26 +00:00
|
|
|
hass.async_add_job(_add_node_to_component)
|
|
|
|
|
2019-07-01 22:54:19 +00:00
|
|
|
hass.add_job(check_has_unique_id, entity, _on_ready, _on_timeout)
|
2017-03-23 15:37:20 +00:00
|
|
|
|
2019-05-19 09:14:11 +00:00
|
|
|
def node_removed(node):
|
|
|
|
node_id = node.node_id
|
|
|
|
node_key = 'node-{}'.format(node_id)
|
|
|
|
_LOGGER.info("Node Removed: %s",
|
|
|
|
hass.data[DATA_DEVICES][node_key])
|
|
|
|
for key in list(hass.data[DATA_DEVICES]):
|
|
|
|
if not key.startswith('{}-'.format(node_id)):
|
|
|
|
continue
|
|
|
|
|
|
|
|
entity = hass.data[DATA_DEVICES][key]
|
|
|
|
_LOGGER.info('Removing Entity - value: %s - entity_id: %s',
|
|
|
|
key, entity.entity_id)
|
|
|
|
hass.add_job(entity.node_removed())
|
|
|
|
del hass.data[DATA_DEVICES][key]
|
|
|
|
|
|
|
|
entity = hass.data[DATA_DEVICES][node_key]
|
|
|
|
hass.add_job(entity.node_removed())
|
|
|
|
del hass.data[DATA_DEVICES][node_key]
|
|
|
|
|
2016-08-08 14:52:28 +00:00
|
|
|
def network_ready():
|
2017-05-02 20:47:20 +00:00
|
|
|
"""Handle the query of all awake nodes."""
|
2019-03-14 19:29:21 +00:00
|
|
|
_LOGGER.info("Z-Wave network is ready for use. All awake nodes "
|
2017-05-02 20:47:20 +00:00
|
|
|
"have been queried. Sleeping nodes will be "
|
|
|
|
"queried when they awake.")
|
2016-09-30 15:43:18 +00:00
|
|
|
hass.bus.fire(const.EVENT_NETWORK_READY)
|
2016-08-08 14:52:28 +00:00
|
|
|
|
|
|
|
def network_complete():
|
2017-05-02 20:47:20 +00:00
|
|
|
"""Handle the querying of all nodes on network."""
|
|
|
|
_LOGGER.info("Z-Wave network is complete. All nodes on the network "
|
|
|
|
"have been queried")
|
2016-09-30 15:43:18 +00:00
|
|
|
hass.bus.fire(const.EVENT_NETWORK_COMPLETE)
|
2016-08-08 14:52:28 +00:00
|
|
|
|
2018-09-28 17:14:57 +00:00
|
|
|
def network_complete_some_dead():
|
|
|
|
"""Handle the querying of all nodes on network."""
|
|
|
|
_LOGGER.info("Z-Wave network is complete. All nodes on the network "
|
2018-10-25 13:58:09 +00:00
|
|
|
"have been queried, but some nodes are marked dead")
|
2018-09-28 17:14:57 +00:00
|
|
|
hass.bus.fire(const.EVENT_NETWORK_COMPLETE_SOME_DEAD)
|
|
|
|
|
2015-03-01 06:49:27 +00:00
|
|
|
dispatcher.connect(
|
2015-02-26 07:27:17 +00:00
|
|
|
value_added, ZWaveNetwork.SIGNAL_VALUE_ADDED, weak=False)
|
2017-03-23 15:37:20 +00:00
|
|
|
dispatcher.connect(
|
|
|
|
node_added, ZWaveNetwork.SIGNAL_NODE_ADDED, weak=False)
|
2019-05-19 09:14:11 +00:00
|
|
|
dispatcher.connect(
|
|
|
|
node_removed, ZWaveNetwork.SIGNAL_NODE_REMOVED, weak=False)
|
2016-08-08 14:52:28 +00:00
|
|
|
dispatcher.connect(
|
|
|
|
network_ready, ZWaveNetwork.SIGNAL_AWAKE_NODES_QUERIED, weak=False)
|
|
|
|
dispatcher.connect(
|
|
|
|
network_complete, ZWaveNetwork.SIGNAL_ALL_NODES_QUERIED, weak=False)
|
2018-09-28 17:14:57 +00:00
|
|
|
dispatcher.connect(
|
|
|
|
network_complete_some_dead,
|
|
|
|
ZWaveNetwork.SIGNAL_ALL_NODES_QUERIED_SOME_DEAD, weak=False)
|
2015-02-23 01:36:28 +00:00
|
|
|
|
2016-06-11 05:53:31 +00:00
|
|
|
def add_node(service):
|
2016-03-07 17:49:31 +00:00
|
|
|
"""Switch into inclusion mode."""
|
2017-05-02 20:47:20 +00:00
|
|
|
_LOGGER.info("Z-Wave add_node have been initialized")
|
2017-04-12 17:09:29 +00:00
|
|
|
network.controller.add_node()
|
2016-01-10 13:44:46 +00:00
|
|
|
|
2016-08-02 18:17:10 +00:00
|
|
|
def add_node_secure(service):
|
|
|
|
"""Switch into secure inclusion mode."""
|
2017-05-02 20:47:20 +00:00
|
|
|
_LOGGER.info("Z-Wave add_node_secure have been initialized")
|
2017-04-12 17:09:29 +00:00
|
|
|
network.controller.add_node(True)
|
2016-08-02 18:17:10 +00:00
|
|
|
|
2016-06-11 05:53:31 +00:00
|
|
|
def remove_node(service):
|
2016-03-07 17:49:31 +00:00
|
|
|
"""Switch into exclusion mode."""
|
2018-11-27 18:09:25 +00:00
|
|
|
_LOGGER.info("Z-Wave remove_node have been initialized")
|
2017-04-12 17:09:29 +00:00
|
|
|
network.controller.remove_node()
|
2016-01-10 13:44:46 +00:00
|
|
|
|
2016-08-08 14:52:28 +00:00
|
|
|
def cancel_command(service):
|
|
|
|
"""Cancel a running controller command."""
|
2017-05-02 20:47:20 +00:00
|
|
|
_LOGGER.info("Cancel running Z-Wave command")
|
2017-04-12 17:09:29 +00:00
|
|
|
network.controller.cancel_command()
|
2016-08-08 14:52:28 +00:00
|
|
|
|
2016-06-11 05:53:31 +00:00
|
|
|
def heal_network(service):
|
2016-03-13 17:51:09 +00:00
|
|
|
"""Heal the network."""
|
2017-05-02 20:47:20 +00:00
|
|
|
_LOGGER.info("Z-Wave heal running")
|
2017-04-12 17:09:29 +00:00
|
|
|
network.heal()
|
2016-03-13 17:51:09 +00:00
|
|
|
|
2016-06-11 05:53:31 +00:00
|
|
|
def soft_reset(service):
|
2016-03-13 17:51:09 +00:00
|
|
|
"""Soft reset the controller."""
|
2017-05-02 20:47:20 +00:00
|
|
|
_LOGGER.info("Z-Wave soft_reset have been initialized")
|
2017-04-12 17:09:29 +00:00
|
|
|
network.controller.soft_reset()
|
2016-03-13 17:51:09 +00:00
|
|
|
|
2018-05-02 19:06:09 +00:00
|
|
|
def update_config(service):
|
|
|
|
"""Update the config from git."""
|
|
|
|
_LOGGER.info("Configuration update has been initialized")
|
|
|
|
network.controller.update_ozw_config()
|
|
|
|
|
2016-06-11 05:53:31 +00:00
|
|
|
def test_network(service):
|
2016-03-30 08:29:11 +00:00
|
|
|
"""Test the network by sending commands to all the nodes."""
|
2017-05-02 20:47:20 +00:00
|
|
|
_LOGGER.info("Z-Wave test_network have been initialized")
|
2017-04-12 17:09:29 +00:00
|
|
|
network.test()
|
2016-03-30 08:29:11 +00:00
|
|
|
|
2017-04-07 13:17:23 +00:00
|
|
|
def stop_network(_service_or_event):
|
2016-08-08 14:52:28 +00:00
|
|
|
"""Stop Z-Wave network."""
|
2017-05-02 20:47:20 +00:00
|
|
|
_LOGGER.info("Stopping Z-Wave network")
|
2017-04-12 17:09:29 +00:00
|
|
|
network.stop()
|
2017-04-10 20:11:39 +00:00
|
|
|
if hass.state == CoreState.running:
|
2016-10-22 12:08:24 +00:00
|
|
|
hass.bus.fire(const.EVENT_NETWORK_STOP)
|
2015-02-23 01:36:28 +00:00
|
|
|
|
2019-07-07 05:36:57 +00:00
|
|
|
async def rename_node(service):
|
2016-08-21 21:36:44 +00:00
|
|
|
"""Rename a node."""
|
2017-04-05 12:11:37 +00:00
|
|
|
node_id = service.data.get(const.ATTR_NODE_ID)
|
2017-05-10 01:56:41 +00:00
|
|
|
node = network.nodes[node_id]
|
2016-09-30 15:43:18 +00:00
|
|
|
name = service.data.get(const.ATTR_NAME)
|
2016-08-21 21:36:44 +00:00
|
|
|
node.name = name
|
|
|
|
_LOGGER.info(
|
2017-05-02 20:47:20 +00:00
|
|
|
"Renamed Z-Wave node %d to %s", node_id, name)
|
2019-07-01 22:54:19 +00:00
|
|
|
update_ids = service.data.get(const.ATTR_UPDATE_IDS)
|
|
|
|
# We want to rename the device, the node entity,
|
|
|
|
# and all the contained entities
|
|
|
|
node_key = 'node-{}'.format(node_id)
|
|
|
|
entity = hass.data[DATA_DEVICES][node_key]
|
2019-07-07 05:36:57 +00:00
|
|
|
await entity.node_renamed(update_ids)
|
2019-07-01 22:54:19 +00:00
|
|
|
for key in list(hass.data[DATA_DEVICES]):
|
|
|
|
if not key.startswith('{}-'.format(node_id)):
|
|
|
|
continue
|
|
|
|
entity = hass.data[DATA_DEVICES][key]
|
2019-07-07 05:36:57 +00:00
|
|
|
await entity.value_renamed(update_ids)
|
2016-08-21 21:36:44 +00:00
|
|
|
|
2019-07-07 05:36:57 +00:00
|
|
|
async def rename_value(service):
|
2017-06-03 06:03:00 +00:00
|
|
|
"""Rename a node value."""
|
|
|
|
node_id = service.data.get(const.ATTR_NODE_ID)
|
|
|
|
value_id = service.data.get(const.ATTR_VALUE_ID)
|
|
|
|
node = network.nodes[node_id]
|
|
|
|
value = node.values[value_id]
|
|
|
|
name = service.data.get(const.ATTR_NAME)
|
|
|
|
value.label = name
|
|
|
|
_LOGGER.info(
|
|
|
|
"Renamed Z-Wave value (Node %d Value %d) to %s",
|
|
|
|
node_id, value_id, name)
|
2019-07-01 22:54:19 +00:00
|
|
|
update_ids = service.data.get(const.ATTR_UPDATE_IDS)
|
|
|
|
value_key = '{}-{}'.format(node_id, value_id)
|
|
|
|
entity = hass.data[DATA_DEVICES][value_key]
|
2019-07-07 05:36:57 +00:00
|
|
|
await entity.value_renamed(update_ids)
|
2017-06-03 06:03:00 +00:00
|
|
|
|
2017-09-11 18:30:48 +00:00
|
|
|
def set_poll_intensity(service):
|
|
|
|
"""Set the polling intensity of a node value."""
|
|
|
|
node_id = service.data.get(const.ATTR_NODE_ID)
|
|
|
|
value_id = service.data.get(const.ATTR_VALUE_ID)
|
|
|
|
node = network.nodes[node_id]
|
|
|
|
value = node.values[value_id]
|
|
|
|
intensity = service.data.get(const.ATTR_POLL_INTENSITY)
|
|
|
|
if intensity == 0:
|
|
|
|
if value.disable_poll():
|
|
|
|
_LOGGER.info("Polling disabled (Node %d Value %d)",
|
|
|
|
node_id, value_id)
|
|
|
|
return
|
|
|
|
_LOGGER.info("Polling disabled failed (Node %d Value %d)",
|
|
|
|
node_id, value_id)
|
|
|
|
else:
|
|
|
|
if value.enable_poll(intensity):
|
|
|
|
_LOGGER.info(
|
|
|
|
"Set polling intensity (Node %d Value %d) to %s",
|
|
|
|
node_id, value_id, intensity)
|
|
|
|
return
|
|
|
|
_LOGGER.info("Set polling intensity failed (Node %d Value %d)",
|
|
|
|
node_id, value_id)
|
|
|
|
|
2017-03-02 11:36:40 +00:00
|
|
|
def remove_failed_node(service):
|
|
|
|
"""Remove failed node."""
|
|
|
|
node_id = service.data.get(const.ATTR_NODE_ID)
|
2017-05-02 20:47:20 +00:00
|
|
|
_LOGGER.info("Trying to remove zwave node %d", node_id)
|
2017-04-12 17:09:29 +00:00
|
|
|
network.controller.remove_failed_node(node_id)
|
2017-03-02 11:36:40 +00:00
|
|
|
|
|
|
|
def replace_failed_node(service):
|
|
|
|
"""Replace failed node."""
|
|
|
|
node_id = service.data.get(const.ATTR_NODE_ID)
|
2017-05-02 20:47:20 +00:00
|
|
|
_LOGGER.info("Trying to replace zwave node %d", node_id)
|
2017-04-12 17:09:29 +00:00
|
|
|
network.controller.replace_failed_node(node_id)
|
2017-03-02 11:36:40 +00:00
|
|
|
|
2016-10-05 05:04:19 +00:00
|
|
|
def set_config_parameter(service):
|
|
|
|
"""Set a config parameter to a node."""
|
2016-10-14 06:45:00 +00:00
|
|
|
node_id = service.data.get(const.ATTR_NODE_ID)
|
2017-04-12 17:09:29 +00:00
|
|
|
node = network.nodes[node_id]
|
2016-10-05 05:04:19 +00:00
|
|
|
param = service.data.get(const.ATTR_CONFIG_PARAMETER)
|
2017-01-02 17:53:46 +00:00
|
|
|
selection = service.data.get(const.ATTR_CONFIG_VALUE)
|
2017-05-19 00:39:31 +00:00
|
|
|
size = service.data.get(const.ATTR_CONFIG_SIZE)
|
2017-01-02 17:53:46 +00:00
|
|
|
for value in (
|
|
|
|
node.get_values(class_id=const.COMMAND_CLASS_CONFIGURATION)
|
|
|
|
.values()):
|
2017-05-19 00:39:31 +00:00
|
|
|
if value.index != param:
|
|
|
|
continue
|
2019-04-15 21:24:20 +00:00
|
|
|
if value.type == const.TYPE_BOOL:
|
|
|
|
value.data = int(selection == 'True')
|
|
|
|
_LOGGER.info("Setting config parameter %s on Node %s "
|
|
|
|
"with bool selection %s", param, node_id,
|
|
|
|
str(selection))
|
|
|
|
return
|
|
|
|
if value.type == const.TYPE_LIST:
|
2018-03-20 13:04:24 +00:00
|
|
|
value.data = str(selection)
|
|
|
|
_LOGGER.info("Setting config parameter %s on Node %s "
|
2019-04-15 21:24:20 +00:00
|
|
|
"with list selection %s", param, node_id,
|
2018-03-20 13:04:24 +00:00
|
|
|
str(selection))
|
|
|
|
return
|
|
|
|
if value.type == const.TYPE_BUTTON:
|
|
|
|
network.manager.pressButton(value.value_id)
|
|
|
|
network.manager.releaseButton(value.value_id)
|
|
|
|
_LOGGER.info("Setting config parameter %s on Node %s "
|
|
|
|
"with button selection %s", param, node_id,
|
2017-05-19 00:39:31 +00:00
|
|
|
selection)
|
|
|
|
return
|
2017-07-06 03:02:16 +00:00
|
|
|
value.data = int(selection)
|
|
|
|
_LOGGER.info("Setting config parameter %s on Node %s "
|
|
|
|
"with selection %s", param, node_id,
|
|
|
|
selection)
|
|
|
|
return
|
2017-05-19 00:39:31 +00:00
|
|
|
node.set_config_param(param, selection, size)
|
|
|
|
_LOGGER.info("Setting unknown config parameter %s on Node %s "
|
|
|
|
"with selection %s", param, node_id,
|
|
|
|
selection)
|
2016-10-05 05:04:19 +00:00
|
|
|
|
2018-09-27 10:34:42 +00:00
|
|
|
def refresh_node_value(service):
|
|
|
|
"""Refresh the specified value from a node."""
|
|
|
|
node_id = service.data.get(const.ATTR_NODE_ID)
|
|
|
|
value_id = service.data.get(const.ATTR_VALUE_ID)
|
|
|
|
node = network.nodes[node_id]
|
|
|
|
node.values[value_id].refresh()
|
|
|
|
_LOGGER.info("Node %s value %s refreshed", node_id, value_id)
|
|
|
|
|
|
|
|
def set_node_value(service):
|
|
|
|
"""Set the specified value on a node."""
|
|
|
|
node_id = service.data.get(const.ATTR_NODE_ID)
|
|
|
|
value_id = service.data.get(const.ATTR_VALUE_ID)
|
|
|
|
value = service.data.get(const.ATTR_CONFIG_VALUE)
|
|
|
|
node = network.nodes[node_id]
|
|
|
|
node.values[value_id].data = value
|
|
|
|
_LOGGER.info("Node %s value %s set to %s", node_id, value_id, value)
|
|
|
|
|
2017-01-01 20:10:45 +00:00
|
|
|
def print_config_parameter(service):
|
|
|
|
"""Print a config parameter from a node."""
|
|
|
|
node_id = service.data.get(const.ATTR_NODE_ID)
|
2017-04-12 17:09:29 +00:00
|
|
|
node = network.nodes[node_id]
|
2017-01-01 20:10:45 +00:00
|
|
|
param = service.data.get(const.ATTR_CONFIG_PARAMETER)
|
2017-05-02 20:47:20 +00:00
|
|
|
_LOGGER.info("Config parameter %s on Node %s: %s",
|
2017-01-01 20:10:45 +00:00
|
|
|
param, node_id, get_config_value(node, param))
|
|
|
|
|
2017-02-17 20:03:55 +00:00
|
|
|
def print_node(service):
|
|
|
|
"""Print all information about z-wave node."""
|
|
|
|
node_id = service.data.get(const.ATTR_NODE_ID)
|
2017-04-12 17:09:29 +00:00
|
|
|
node = network.nodes[node_id]
|
2017-02-17 20:03:55 +00:00
|
|
|
nice_print_node(node)
|
|
|
|
|
2017-02-10 16:54:48 +00:00
|
|
|
def set_wakeup(service):
|
|
|
|
"""Set wake-up interval of a node."""
|
|
|
|
node_id = service.data.get(const.ATTR_NODE_ID)
|
2017-04-12 17:09:29 +00:00
|
|
|
node = network.nodes[node_id]
|
2017-02-10 16:54:48 +00:00
|
|
|
value = service.data.get(const.ATTR_CONFIG_VALUE)
|
|
|
|
if node.can_wake_up():
|
|
|
|
for value_id in node.get_values(
|
|
|
|
class_id=const.COMMAND_CLASS_WAKE_UP):
|
|
|
|
node.values[value_id].data = value
|
|
|
|
_LOGGER.info("Node %s wake-up set to %d", node_id, value)
|
|
|
|
else:
|
|
|
|
_LOGGER.info("Node %s is not wakeable", node_id)
|
|
|
|
|
2016-10-16 18:36:06 +00:00
|
|
|
def change_association(service):
|
|
|
|
"""Change an association in the zwave network."""
|
|
|
|
association_type = service.data.get(const.ATTR_ASSOCIATION)
|
|
|
|
node_id = service.data.get(const.ATTR_NODE_ID)
|
|
|
|
target_node_id = service.data.get(const.ATTR_TARGET_NODE_ID)
|
|
|
|
group = service.data.get(const.ATTR_GROUP)
|
|
|
|
instance = service.data.get(const.ATTR_INSTANCE)
|
|
|
|
|
2017-04-12 17:09:29 +00:00
|
|
|
node = ZWaveGroup(group, network, node_id)
|
2016-10-16 18:36:06 +00:00
|
|
|
if association_type == 'add':
|
|
|
|
node.add_association(target_node_id, instance)
|
|
|
|
_LOGGER.info("Adding association for node:%s in group:%s "
|
|
|
|
"target node:%s, instance=%s", node_id, group,
|
|
|
|
target_node_id, instance)
|
|
|
|
if association_type == 'remove':
|
|
|
|
node.remove_association(target_node_id, instance)
|
|
|
|
_LOGGER.info("Removing association for node:%s in group:%s "
|
|
|
|
"target node:%s, instance=%s", node_id, group,
|
|
|
|
target_node_id, instance)
|
|
|
|
|
2018-03-17 03:27:05 +00:00
|
|
|
async def async_refresh_entity(service):
|
2017-03-04 17:13:24 +00:00
|
|
|
"""Refresh values that specific entity depends on."""
|
|
|
|
entity_id = service.data.get(ATTR_ENTITY_ID)
|
|
|
|
async_dispatcher_send(
|
|
|
|
hass, SIGNAL_REFRESH_ENTITY_FORMAT.format(entity_id))
|
|
|
|
|
|
|
|
def refresh_node(service):
|
|
|
|
"""Refresh all node info."""
|
|
|
|
node_id = service.data.get(const.ATTR_NODE_ID)
|
2017-04-12 17:09:29 +00:00
|
|
|
node = network.nodes[node_id]
|
2017-03-04 17:13:24 +00:00
|
|
|
node.refresh_info()
|
|
|
|
|
2017-05-21 18:15:24 +00:00
|
|
|
def reset_node_meters(service):
|
|
|
|
"""Reset meter counters of a node."""
|
|
|
|
node_id = service.data.get(const.ATTR_NODE_ID)
|
|
|
|
instance = service.data.get(const.ATTR_INSTANCE)
|
|
|
|
node = network.nodes[node_id]
|
|
|
|
for value in (
|
|
|
|
node.get_values(class_id=const.COMMAND_CLASS_METER)
|
|
|
|
.values()):
|
2017-06-14 12:41:20 +00:00
|
|
|
if value.index != const.INDEX_METER_RESET:
|
2017-05-21 18:15:24 +00:00
|
|
|
continue
|
|
|
|
if value.instance != instance:
|
|
|
|
continue
|
|
|
|
network.manager.pressButton(value.value_id)
|
|
|
|
network.manager.releaseButton(value.value_id)
|
|
|
|
_LOGGER.info("Resetting meters on node %s instance %s....",
|
|
|
|
node_id, instance)
|
|
|
|
return
|
|
|
|
_LOGGER.info("Node %s on instance %s does not have resettable "
|
|
|
|
"meters.", node_id, instance)
|
|
|
|
|
2017-11-05 17:19:19 +00:00
|
|
|
def heal_node(service):
|
|
|
|
"""Heal a node on the network."""
|
|
|
|
node_id = service.data.get(const.ATTR_NODE_ID)
|
|
|
|
update_return_routes = service.data.get(const.ATTR_RETURN_ROUTES)
|
|
|
|
node = network.nodes[node_id]
|
|
|
|
_LOGGER.info("Z-Wave node heal running for node %s", node_id)
|
|
|
|
node.heal(update_return_routes)
|
|
|
|
|
|
|
|
def test_node(service):
|
|
|
|
"""Send test messages to a node on the network."""
|
|
|
|
node_id = service.data.get(const.ATTR_NODE_ID)
|
|
|
|
messages = service.data.get(const.ATTR_MESSAGES)
|
|
|
|
node = network.nodes[node_id]
|
|
|
|
_LOGGER.info("Sending %s test-messages to node %s.", messages, node_id)
|
|
|
|
node.test(messages)
|
|
|
|
|
2016-08-02 17:08:04 +00:00
|
|
|
    def start_zwave(_service_or_event):
        """Startup Z-Wave network.

        Runs on EVENT_HOMEASSISTANT_START (or via the start_network
        service); starts the network and schedules a background wait for
        the network to become usable before finalizing setup.
        """
        _LOGGER.info("Starting Z-Wave network...")
        network.start()
        # Announce the start on the event bus for any listeners.
        hass.bus.fire(const.EVENT_NETWORK_START)

        async def _check_awaked():
            """Wait for Z-wave awaked state (or timeout) and finalize start."""
            _LOGGER.debug(
                "network state: %d %s", network.state,
                network.state_str)

            start_time = dt_util.utcnow()
            while True:
                waited = int((dt_util.utcnow()-start_time).total_seconds())

                if network.state >= network.STATE_AWAKED:
                    # Need to be in STATE_AWAKED before talking to nodes.
                    _LOGGER.info("Z-Wave ready after %d seconds", waited)
                    break
                elif waited >= const.NETWORK_READY_WAIT_SECS:
                    # Wait up to NETWORK_READY_WAIT_SECS seconds for the Z-Wave
                    # network to be ready.
                    _LOGGER.warning(
                        "Z-Wave not ready after %d seconds, continuing anyway",
                        waited)
                    _LOGGER.info(
                        "final network state: %d %s", network.state,
                        network.state_str)
                    break
                else:
                    # Re-check the network state once per second.
                    await asyncio.sleep(1)

            # Both the ready and the timed-out path finalize the start.
            hass.async_add_job(_finalize_start)

        hass.add_job(_check_awaked)
|
|
|
|
|
|
|
|
    def _finalize_start():
        """Perform final initializations after Z-Wave network is awaked."""
        # Apply the user-configured polling interval, if one was given.
        polling_interval = convert(
            config.get(CONF_POLLING_INTERVAL), int)
        if polling_interval is not None:
            network.set_poll_interval(polling_interval, False)

        poll_interval = network.get_poll_interval()
        _LOGGER.info("Z-Wave polling interval set to %d ms", poll_interval)

        # Stop the network cleanly when Home Assistant shuts down.
        hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, stop_network)

        # Register node services for Z-Wave network
        hass.services.register(DOMAIN, const.SERVICE_ADD_NODE, add_node)
        hass.services.register(DOMAIN, const.SERVICE_ADD_NODE_SECURE,
                               add_node_secure)
        hass.services.register(DOMAIN, const.SERVICE_REMOVE_NODE, remove_node)
        hass.services.register(DOMAIN, const.SERVICE_CANCEL_COMMAND,
                               cancel_command)
        hass.services.register(DOMAIN, const.SERVICE_HEAL_NETWORK,
                               heal_network)
        hass.services.register(DOMAIN, const.SERVICE_SOFT_RESET, soft_reset)
        hass.services.register(DOMAIN, const.SERVICE_UPDATE_CONFIG,
                               update_config)
        hass.services.register(DOMAIN, const.SERVICE_TEST_NETWORK,
                               test_network)
        hass.services.register(DOMAIN, const.SERVICE_STOP_NETWORK,
                               stop_network)
        hass.services.register(DOMAIN, const.SERVICE_RENAME_NODE, rename_node,
                               schema=RENAME_NODE_SCHEMA)
        hass.services.register(DOMAIN, const.SERVICE_RENAME_VALUE,
                               rename_value,
                               schema=RENAME_VALUE_SCHEMA)
        hass.services.register(DOMAIN, const.SERVICE_SET_CONFIG_PARAMETER,
                               set_config_parameter,
                               schema=SET_CONFIG_PARAMETER_SCHEMA)
        hass.services.register(DOMAIN, const.SERVICE_SET_NODE_VALUE,
                               set_node_value,
                               schema=SET_NODE_VALUE_SCHEMA)
        hass.services.register(DOMAIN, const.SERVICE_REFRESH_NODE_VALUE,
                               refresh_node_value,
                               schema=REFRESH_NODE_VALUE_SCHEMA)
        hass.services.register(DOMAIN, const.SERVICE_PRINT_CONFIG_PARAMETER,
                               print_config_parameter,
                               schema=PRINT_CONFIG_PARAMETER_SCHEMA)
        hass.services.register(DOMAIN, const.SERVICE_REMOVE_FAILED_NODE,
                               remove_failed_node,
                               schema=NODE_SERVICE_SCHEMA)
        hass.services.register(DOMAIN, const.SERVICE_REPLACE_FAILED_NODE,
                               replace_failed_node,
                               schema=NODE_SERVICE_SCHEMA)

        hass.services.register(DOMAIN, const.SERVICE_CHANGE_ASSOCIATION,
                               change_association,
                               schema=CHANGE_ASSOCIATION_SCHEMA)
        hass.services.register(DOMAIN, const.SERVICE_SET_WAKEUP,
                               set_wakeup,
                               schema=SET_WAKEUP_SCHEMA)
        hass.services.register(DOMAIN, const.SERVICE_PRINT_NODE,
                               print_node,
                               schema=NODE_SERVICE_SCHEMA)
        hass.services.register(DOMAIN, const.SERVICE_REFRESH_ENTITY,
                               async_refresh_entity,
                               schema=REFRESH_ENTITY_SCHEMA)
        hass.services.register(DOMAIN, const.SERVICE_REFRESH_NODE,
                               refresh_node,
                               schema=NODE_SERVICE_SCHEMA)
        hass.services.register(DOMAIN, const.SERVICE_RESET_NODE_METERS,
                               reset_node_meters,
                               schema=RESET_NODE_METERS_SCHEMA)
        hass.services.register(DOMAIN, const.SERVICE_SET_POLL_INTENSITY,
                               set_poll_intensity,
                               schema=SET_POLL_INTENSITY_SCHEMA)
        hass.services.register(DOMAIN, const.SERVICE_HEAL_NODE,
                               heal_node,
                               schema=HEAL_NODE_SCHEMA)
        hass.services.register(DOMAIN, const.SERVICE_TEST_NODE,
                               test_node,
                               schema=TEST_NODE_SCHEMA)

        # Setup autoheal
        if autoheal:
            _LOGGER.info("Z-Wave network autoheal is enabled")
            # Run a network heal every night at midnight.
            async_track_time_change(hass, heal_network, hour=0, minute=0, second=0)
|
2016-06-11 05:53:31 +00:00
|
|
|
|
2018-05-12 21:45:36 +00:00
|
|
|
    # Start the Z-Wave network once Home Assistant itself has started.
    hass.bus.async_listen_once(EVENT_HOMEASSISTANT_START, start_zwave)

    # Also expose starting the network as a service so it can be triggered
    # manually.
    hass.services.async_register(DOMAIN, const.SERVICE_START_NETWORK,
                                 start_zwave)

    # Forward this config entry to every platform the integration supports.
    for entry_component in SUPPORTED_PLATFORMS:
        hass.async_create_task(hass.config_entries.async_forward_entry_setup(
            config_entry, entry_component))

    return True
|
2016-01-26 21:11:27 +00:00
|
|
|
|
|
|
|
|
2017-03-14 23:55:33 +00:00
|
|
|
class ZWaveDeviceEntityValues():
    """Manages entity access to the underlying zwave value objects.

    One instance groups the Z-Wave values (a primary plus optional
    secondary values described by a discovery schema) that together form
    one Home Assistant entity, and creates the entity once all required
    values have been discovered.
    """

    def __init__(self, hass, schema, primary_value, zwave_config,
                 device_config, registry):
        """Initialize the values object with the passed entity schema."""
        self._hass = hass
        self._zwave_config = zwave_config
        self._device_config = device_config
        # Deep copy: the instance/node restrictions added below must not
        # leak back into the shared discovery schema.
        self._schema = copy.deepcopy(schema)
        self._values = {}
        self._entity = None
        self._workaround_ignore = False
        self._registry = registry

        # Start with every schema slot unmatched, and restrict matching to
        # the instance of the primary value.
        for name in self._schema[const.DISC_VALUES].keys():
            self._values[name] = None
            self._schema[const.DISC_VALUES][name][const.DISC_INSTANCE] = \
                [primary_value.instance]

        self._values[const.DISC_PRIMARY] = primary_value
        self._node = primary_value.node
        self._schema[const.DISC_NODE_ID] = [self._node.node_id]

        # Check values that have already been discovered for node
        for value in self._node.values.values():
            self.check_value(value)

        self._check_entity_ready()

    def __getattr__(self, name):
        """Get the specified value for this entity.

        Enables attribute-style access such as ``values.primary``.
        """
        return self._values[name]

    def __iter__(self):
        """Allow iteration over all values."""
        return iter(self._values.values())

    def check_value(self, value):
        """Check if the new value matches a missing value for this entity.

        If a match is found, it is added to the values mapping.
        """
        if not check_node_schema(value.node, self._schema):
            return
        for name in self._values:
            # Skip slots that have already been filled.
            if self._values[name] is not None:
                continue
            if not check_value_schema(
                    value, self._schema[const.DISC_VALUES][name]):
                continue
            self._values[name] = value
            # If the entity already exists, notify it of the new value.
            if self._entity:
                self._entity.value_added()
                self._entity.value_changed()

            self._check_entity_ready()

    def _check_entity_ready(self):
        """Check if all required values are discovered and create entity."""
        if self._workaround_ignore:
            return
        # Only ever create the entity once.
        if self._entity is not None:
            return

        # Bail out while any non-optional schema slot is still unmatched.
        for name in self._schema[const.DISC_VALUES]:
            if self._values[name] is None and \
                    not self._schema[const.DISC_VALUES][name].get(
                        const.DISC_OPTIONAL):
                return

        component = self._schema[const.DISC_COMPONENT]

        # Device-specific workarounds may redirect this value to another
        # platform, or suppress the entity entirely.
        workaround_component = workaround.get_device_component_mapping(
            self.primary)
        if workaround_component and workaround_component != component:
            if workaround_component == workaround.WORKAROUND_IGNORE:
                _LOGGER.info("Ignoring Node %d Value %d due to workaround.",
                             self.primary.node.node_id, self.primary.value_id)
                # No entity will be created for this value
                self._workaround_ignore = True
                return
            _LOGGER.debug("Using %s instead of %s",
                          workaround_component, component)
            component = workaround_component

        # Prefer an entity_id already present in the entity registry;
        # otherwise generate a fresh one from the value name.
        entity_id = self._registry.async_get_entity_id(
            component, DOMAIN,
            compute_value_unique_id(self._node, self.primary))
        if entity_id is None:
            value_name = _value_name(self.primary)
            entity_id = generate_entity_id(component + '.{}', value_name, [])
        node_config = self._device_config.get(entity_id)

        # Configure node
        _LOGGER.debug("Adding Node_id=%s Generic_command_class=%s, "
                      "Specific_command_class=%s, "
                      "Command_class=%s, Value type=%s, "
                      "Genre=%s as %s", self._node.node_id,
                      self._node.generic, self._node.specific,
                      self.primary.command_class, self.primary.type,
                      self.primary.genre, component)

        if node_config.get(CONF_IGNORED):
            _LOGGER.info(
                "Ignoring entity %s due to device settings", entity_id)
            # No entity will be created for this value
            self._workaround_ignore = True
            return

        polling_intensity = convert(
            node_config.get(CONF_POLLING_INTENSITY), int)
        if polling_intensity:
            self.primary.enable_poll(polling_intensity)

        # Import the matching platform module relative to this package.
        platform = import_module('.{}'.format(component),
                                 __name__)

        device = platform.get_device(
            node=self._node, values=self,
            node_config=node_config, hass=self._hass)
        if device is None:
            # No entity will be created for this value
            self._workaround_ignore = True
            return

        self._entity = device

        @callback
        def _on_ready(sec):
            # Called once the device obtained a unique_id.
            _LOGGER.info(
                "Z-Wave entity %s (node_id: %d) ready after %d seconds",
                device.name, self._node.node_id, sec)
            self._hass.async_add_job(discover_device, component, device)

        @callback
        def _on_timeout(sec):
            # Proceed with discovery even without a unique_id.
            _LOGGER.warning(
                "Z-Wave entity %s (node_id: %d) not ready after %d seconds, "
                "continuing anyway",
                device.name, self._node.node_id, sec)
            self._hass.async_add_job(discover_device, component, device)

        async def discover_device(component, device):
            """Put device in a dictionary and call discovery on it."""
            # Already discovered under this unique_id: nothing to do.
            if self._hass.data[DATA_DEVICES].get(device.unique_id):
                return

            self._hass.data[DATA_DEVICES][device.unique_id] = device
            if component in SUPPORTED_PLATFORMS:
                # Platform is set up via the config entry; hand the device
                # over through the dispatcher signal.
                async_dispatcher_send(
                    self._hass, 'zwave_new_{}'.format(component), device)
            else:
                await discovery.async_load_platform(
                    self._hass, component, DOMAIN,
                    {const.DISCOVERY_DEVICE: device.unique_id},
                    self._zwave_config)

        if device.unique_id:
            self._hass.add_job(discover_device, component, device)
        else:
            # No unique_id yet; wait for one (or a timeout) before
            # running discovery.
            self._hass.add_job(check_has_unique_id, device, _on_ready,
                               _on_timeout)
|
2017-03-14 23:55:33 +00:00
|
|
|
|
|
|
|
|
2017-03-23 15:37:20 +00:00
|
|
|
class ZWaveDeviceEntity(ZWaveBaseEntity):
    """Representation of a Z-Wave node entity."""

    def __init__(self, values, domain):
        """Initialize the z-Wave device.

        values: the ZWaveDeviceEntityValues grouping this entity's values.
        """
        # pylint: disable=import-error
        super().__init__()
        # NOTE(review): imported locally, presumably because openzwave is
        # an optional dependency — confirm before moving to module level.
        from openzwave.network import ZWaveNetwork
        from pydispatch import dispatcher
        self.values = values
        self.node = values.primary.node
        self.values.primary.set_change_verified(False)

        self._name = _value_name(self.values.primary)
        self._unique_id = self._compute_unique_id()
        self._update_attributes()

        # Subscribe to every value change on the network;
        # network_value_changed filters for this entity's values.
        dispatcher.connect(
            self.network_value_changed, ZWaveNetwork.SIGNAL_VALUE_CHANGED)

    def network_value_changed(self, value):
        """Handle a value change on the network."""
        # Only react when the changed value belongs to this entity.
        if value.value_id in [v.value_id for v in self.values if v]:
            return self.value_changed()

    def value_added(self):
        """Handle a new value of this entity.

        No-op here; subclasses may override.
        """
        pass

    def value_changed(self):
        """Handle a changed value for this entity's node."""
        self._update_attributes()
        self.update_properties()
        self.maybe_schedule_update()

    async def value_renamed(self, update_ids=False):
        """Rename the node and update any IDs."""
        self._name = _value_name(self.values.primary)
        if update_ids:
            # Update entity ID.
            ent_reg = await async_get_registry(self.hass)
            new_entity_id = ent_reg.async_generate_entity_id(
                self.platform.domain,
                self._name,
                self.platform.entities.keys() - {self.entity_id})
            if new_entity_id != self.entity_id:
                # Don't change the name attribute, it will be None unless
                # customised and if it's been customised, keep the
                # customisation.
                ent_reg.async_update_entity(
                    self.entity_id, new_entity_id=new_entity_id)
                return
        # else for the above two ifs, update if not using update_entity
        self.async_schedule_update_ha_state()

    async def async_added_to_hass(self):
        """Add device to dict."""
        # Refresh this entity's values when the refresh_entity service
        # targets it.
        async_dispatcher_connect(
            self.hass,
            SIGNAL_REFRESH_ENTITY_FORMAT.format(self.entity_id),
            self.refresh_from_network)

    def _update_attributes(self):
        """Update the node attributes. May only be used inside callback."""
        self.node_id = self.node.node_id
        self._name = _value_name(self.values.primary)
        if not self._unique_id:
            self._unique_id = self._compute_unique_id()
            if self._unique_id:
                # unique_id just became available; re-add the entity so it
                # is registered with it.
                self.try_remove_and_add()

        if self.values.power:
            self.power_consumption = round(
                self.values.power.data, self.values.power.precision)
        else:
            self.power_consumption = None

    def update_properties(self):
        """Update on data changes for node values.

        No-op here; subclasses may override.
        """
        pass

    @property
    def should_poll(self):
        """No polling needed."""
        return False

    @property
    def unique_id(self):
        """Return a unique ID."""
        return self._unique_id

    @property
    def device_info(self):
        """Return device information."""
        identifier, name = node_device_id_and_name(
            self.node, self.values.primary.instance)
        info = {
            'name': name,
            'identifiers': {
                identifier
            },
            'manufacturer': self.node.manufacturer_name,
            'model': self.node.product_name,
        }
        # Secondary instances point at this node's device; other nodes
        # point at node 1 (presumably the controller — confirm).
        if self.values.primary.instance > 1:
            info['via_device'] = (DOMAIN, self.node_id, )
        elif self.node_id > 1:
            info['via_device'] = (DOMAIN, 1, )
        return info

    @property
    def name(self):
        """Return the name of the device."""
        return self._name

    @property
    def device_state_attributes(self):
        """Return the device specific state attributes."""
        attrs = {
            const.ATTR_NODE_ID: self.node_id,
            const.ATTR_VALUE_INDEX: self.values.primary.index,
            const.ATTR_VALUE_INSTANCE: self.values.primary.instance,
            const.ATTR_VALUE_ID: str(self.values.primary.value_id),
        }

        if self.power_consumption is not None:
            attrs[ATTR_POWER] = self.power_consumption

        return attrs

    def refresh_from_network(self):
        """Refresh all dependent values from zwave network."""
        for value in self.values:
            if value is not None:
                self.node.refresh_value(value.value_id)

    def _compute_unique_id(self):
        """Return the unique ID, or None while the node is still unknown."""
        if (is_node_parsed(self.node) and
                self.values.primary.label != "Unknown") or \
                self.node.is_ready:
            return compute_value_unique_id(self.node, self.values.primary)
        return None
|
2018-05-12 21:45:36 +00:00
|
|
|
|
|
|
|
|
|
|
|
def compute_value_unique_id(node, value):
    """Compute unique_id a value would get if it were to get one."""
    # The unique ID is "<node id>-<value object id>".
    return '-'.join((str(node.node_id), str(value.object_id)))
|