"""Support for sending data to an Influx database."""
import logging
import re
import queue
import threading
import time
import math

import requests.exceptions
import voluptuous as vol

from homeassistant.const import (
    CONF_DOMAINS,
    CONF_ENTITIES,
    CONF_EXCLUDE,
    CONF_HOST,
    CONF_INCLUDE,
    CONF_PASSWORD,
    CONF_PORT,
    CONF_SSL,
    CONF_USERNAME,
    CONF_VERIFY_SSL,
    EVENT_HOMEASSISTANT_STOP,
    EVENT_STATE_CHANGED,
    STATE_UNAVAILABLE,
    STATE_UNKNOWN,
)
from homeassistant.helpers import state as state_helper, event as event_helper
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity_values import EntityValues

_LOGGER = logging.getLogger(__name__)

CONF_DB_NAME = "database"
CONF_TAGS = "tags"
CONF_DEFAULT_MEASUREMENT = "default_measurement"
CONF_OVERRIDE_MEASUREMENT = "override_measurement"
CONF_TAGS_ATTRIBUTES = "tags_attributes"
CONF_COMPONENT_CONFIG = "component_config"
CONF_COMPONENT_CONFIG_GLOB = "component_config_glob"
CONF_COMPONENT_CONFIG_DOMAIN = "component_config_domain"
CONF_RETRY_COUNT = "max_retries"

DEFAULT_DATABASE = "home_assistant"
DEFAULT_VERIFY_SSL = True
DOMAIN = "influxdb"

TIMEOUT = 5
RETRY_DELAY = 20
QUEUE_BACKLOG_SECONDS = 30
RETRY_INTERVAL = 60  # seconds

BATCH_TIMEOUT = 1
BATCH_BUFFER_SIZE = 100
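
# Worked example of how these constants interact (illustrative, assuming
# max_retries: 2 in the configuration): get_events_json() drops any queued
# event older than
#   QUEUE_BACKLOG_SECONDS + max_tries * RETRY_DELAY = 30 + 2 * 20 = 70 s
# instead of writing it late, and collects at most BATCH_BUFFER_SIZE events
# per write, waiting up to BATCH_TIMEOUT seconds for stragglers.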
COMPONENT_CONFIG_SCHEMA_ENTRY = vol.Schema(
    {vol.Optional(CONF_OVERRIDE_MEASUREMENT): cv.string}
)

CONFIG_SCHEMA = vol.Schema(
    {
        DOMAIN: vol.All(
            vol.Schema(
                {
                    vol.Optional(CONF_HOST): cv.string,
                    vol.Inclusive(CONF_USERNAME, "authentication"): cv.string,
                    vol.Inclusive(CONF_PASSWORD, "authentication"): cv.string,
                    vol.Optional(CONF_EXCLUDE, default={}): vol.Schema(
                        {
                            vol.Optional(CONF_ENTITIES, default=[]): cv.entity_ids,
                            vol.Optional(CONF_DOMAINS, default=[]): vol.All(
                                cv.ensure_list, [cv.string]
                            ),
                        }
                    ),
                    vol.Optional(CONF_INCLUDE, default={}): vol.Schema(
                        {
                            vol.Optional(CONF_ENTITIES, default=[]): cv.entity_ids,
                            vol.Optional(CONF_DOMAINS, default=[]): vol.All(
                                cv.ensure_list, [cv.string]
                            ),
                        }
                    ),
                    vol.Optional(CONF_DB_NAME, default=DEFAULT_DATABASE): cv.string,
                    vol.Optional(CONF_PORT): cv.port,
                    vol.Optional(CONF_SSL): cv.boolean,
                    vol.Optional(CONF_RETRY_COUNT, default=0): cv.positive_int,
                    vol.Optional(CONF_DEFAULT_MEASUREMENT): cv.string,
                    vol.Optional(CONF_OVERRIDE_MEASUREMENT): cv.string,
                    vol.Optional(CONF_TAGS, default={}): vol.Schema(
                        {cv.string: cv.string}
                    ),
                    vol.Optional(CONF_TAGS_ATTRIBUTES, default=[]): vol.All(
                        cv.ensure_list, [cv.string]
                    ),
                    vol.Optional(
                        CONF_VERIFY_SSL, default=DEFAULT_VERIFY_SSL
                    ): cv.boolean,
                    vol.Optional(CONF_COMPONENT_CONFIG, default={}): vol.Schema(
                        {cv.entity_id: COMPONENT_CONFIG_SCHEMA_ENTRY}
                    ),
                    vol.Optional(CONF_COMPONENT_CONFIG_GLOB, default={}): vol.Schema(
                        {cv.string: COMPONENT_CONFIG_SCHEMA_ENTRY}
                    ),
                    vol.Optional(CONF_COMPONENT_CONFIG_DOMAIN, default={}): vol.Schema(
                        {cv.string: COMPONENT_CONFIG_SCHEMA_ENTRY}
                    ),
                }
            )
        )
    },
    extra=vol.ALLOW_EXTRA,
)
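
# Example configuration.yaml entry accepted by the schema above (a sketch;
# host, credentials, and entity names are placeholders):
#
# influxdb:
#   host: 192.168.1.10
#   database: home_assistant
#   username: myuser
#   password: mypassword
#   default_measurement: state
#   include:
#     domains:
#       - sensor
#   exclude:
#     entities:
#       - sensor.noisy_sensor
#   tags:
#     source: hass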
RE_DIGIT_TAIL = re.compile(r"^[^\.]*\d+\.?\d+[^\.]*$")
RE_DECIMAL = re.compile(r"[^\d.]+")
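
# Illustrative behaviour of the two patterns above (examples, not tests):
#   RE_DIGIT_TAIL.match("63%")   -> match;  RE_DECIMAL.sub("", "63%") == "63"
#   RE_DIGIT_TAIL.match("25.5C") -> match;  float(RE_DECIMAL.sub("", "25.5C")) == 25.5
#   RE_DIGIT_TAIL.match("on")    -> None; such values remain "_str" fields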


def setup(hass, config):
    """Set up the InfluxDB component."""
    from influxdb import InfluxDBClient, exceptions

    conf = config[DOMAIN]

    kwargs = {
        "database": conf[CONF_DB_NAME],
        "verify_ssl": conf[CONF_VERIFY_SSL],
        "timeout": TIMEOUT,
    }

    if CONF_HOST in conf:
        kwargs["host"] = conf[CONF_HOST]

    if CONF_PORT in conf:
        kwargs["port"] = conf[CONF_PORT]

    if CONF_USERNAME in conf:
        kwargs["username"] = conf[CONF_USERNAME]

    if CONF_PASSWORD in conf:
        kwargs["password"] = conf[CONF_PASSWORD]

    if CONF_SSL in conf:
        kwargs["ssl"] = conf[CONF_SSL]

    include = conf.get(CONF_INCLUDE, {})
    exclude = conf.get(CONF_EXCLUDE, {})
    whitelist_e = set(include.get(CONF_ENTITIES, []))
    whitelist_d = set(include.get(CONF_DOMAINS, []))
    blacklist_e = set(exclude.get(CONF_ENTITIES, []))
    blacklist_d = set(exclude.get(CONF_DOMAINS, []))
    tags = conf.get(CONF_TAGS)
    tags_attributes = conf.get(CONF_TAGS_ATTRIBUTES)
    default_measurement = conf.get(CONF_DEFAULT_MEASUREMENT)
    override_measurement = conf.get(CONF_OVERRIDE_MEASUREMENT)
    component_config = EntityValues(
        conf[CONF_COMPONENT_CONFIG],
        conf[CONF_COMPONENT_CONFIG_DOMAIN],
        conf[CONF_COMPONENT_CONFIG_GLOB],
    )
    max_tries = conf.get(CONF_RETRY_COUNT)

    try:
        influx = InfluxDBClient(**kwargs)
        influx.write_points([])
    except (exceptions.InfluxDBClientError, requests.exceptions.ConnectionError) as exc:
        _LOGGER.warning(
            "Database host is not accessible due to '%s', please "
            "check your entries in the configuration file (host, "
            "port, etc.) and verify that the database exists and is "
            "READ/WRITE. Retrying in %s seconds.",
            exc,
            RETRY_INTERVAL,
        )
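        # Schedule a full re-run of setup() once RETRY_INTERVAL has elapsed;
        # returning True below keeps the component loaded while retrying.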
        event_helper.call_later(hass, RETRY_INTERVAL, lambda _: setup(hass, config))
        return True

    def event_to_json(event):
        """Convert a state-changed event into a JSON point for InfluxDB."""
        state = event.data.get("new_state")
        if (
            state is None
            or state.state in (STATE_UNKNOWN, "", STATE_UNAVAILABLE)
            or state.entity_id in blacklist_e
            or state.domain in blacklist_d
        ):
            return

        try:
            if (
                (whitelist_e or whitelist_d)
                and state.entity_id not in whitelist_e
                and state.domain not in whitelist_d
            ):
                return

            _include_state = _include_value = False

            _state_as_value = float(state.state)
            _include_value = True
        except ValueError:
            try:
                _state_as_value = float(state_helper.state_as_number(state))
                _include_state = _include_value = True
            except ValueError:
                _include_state = True

        include_uom = True
        measurement = component_config.get(state.entity_id).get(
            CONF_OVERRIDE_MEASUREMENT
        )
        if measurement in (None, ""):
            if override_measurement:
                measurement = override_measurement
            else:
                measurement = state.attributes.get("unit_of_measurement")
                if measurement in (None, ""):
                    if default_measurement:
                        measurement = default_measurement
                    else:
                        measurement = state.entity_id
                else:
                    include_uom = False
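
        # Net effect of the chain above, as a precedence list (a summary
        # comment, not additional logic): the per-entity override_measurement,
        # then the global override_measurement, then the state's
        # unit_of_measurement attribute, then default_measurement, and
        # finally the entity_id itself.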

        json = {
            "measurement": measurement,
            "tags": {"domain": state.domain, "entity_id": state.object_id},
            "time": event.time_fired,
            "fields": {},
        }
        if _include_state:
            json["fields"]["state"] = state.state
        if _include_value:
            json["fields"]["value"] = _state_as_value

        for key, value in state.attributes.items():
            if key in tags_attributes:
                json["tags"][key] = value
            elif key != "unit_of_measurement" or include_uom:
                # If the key is already in fields, rename it to avoid
                # overwriting the state/value entries above.
                if key in json["fields"]:
                    key = key + "_"
                # Prevent column data errors in InfluxDB: try to cast each
                # value to a float, and if that fails store the value as a
                # string under the field key with a "_str" postfix.
                try:
                    json["fields"][key] = float(value)
                except (ValueError, TypeError):
                    new_key = f"{key}_str"
                    new_value = str(value)
                    json["fields"][new_key] = new_value

                    if RE_DIGIT_TAIL.match(new_value):
                        json["fields"][key] = float(RE_DECIMAL.sub("", new_value))

                # Infinity and NaN are not valid floats in InfluxDB
                try:
                    if not math.isfinite(json["fields"][key]):
                        del json["fields"][key]
                except (KeyError, TypeError):
                    pass

        json["tags"].update(tags)

        return json

    instance = hass.data[DOMAIN] = InfluxThread(hass, influx, event_to_json, max_tries)
    instance.start()

    def shutdown(event):
        """Shut down the thread."""
        instance.queue.put(None)
        instance.join()
        influx.close()

    hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, shutdown)

    return True


class InfluxThread(threading.Thread):
    """A threaded event handler class."""

    def __init__(self, hass, influx, event_to_json, max_tries):
        """Initialize the listener."""
        threading.Thread.__init__(self, name="InfluxDB")
        self.queue = queue.Queue()
        self.influx = influx
        self.event_to_json = event_to_json
        self.max_tries = max_tries
        self.write_errors = 0
        self.shutdown = False
        hass.bus.listen(EVENT_STATE_CHANGED, self._event_listener)

    def _event_listener(self, event):
        """Listen for new messages on the bus and queue them for Influx."""
        item = (time.monotonic(), event)
        self.queue.put(item)
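        # Note: the tuple carries time.monotonic() rather than wall-clock
        # time, so the age check in get_events_json() is unaffected by
        # system clock adjustments.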

    @staticmethod
    def batch_timeout():
        """Return number of seconds to wait for more events."""
        return BATCH_TIMEOUT

    def get_events_json(self):
        """Return a batch of events formatted for writing."""
        queue_seconds = QUEUE_BACKLOG_SECONDS + self.max_tries * RETRY_DELAY

        count = 0
        json = []

        dropped = 0

        try:
            while len(json) < BATCH_BUFFER_SIZE and not self.shutdown:
                timeout = None if count == 0 else self.batch_timeout()
                item = self.queue.get(timeout=timeout)
                count += 1

                if item is None:
                    self.shutdown = True
                else:
                    timestamp, event = item
                    age = time.monotonic() - timestamp

                    if age < queue_seconds:
                        event_json = self.event_to_json(event)
                        if event_json:
                            json.append(event_json)
                    else:
                        dropped += 1

        except queue.Empty:
            pass

        if dropped:
            _LOGGER.warning("Catching up, dropped %d old events", dropped)

        return count, json

    def write_to_influxdb(self, json):
        """Write preprocessed events to InfluxDB, with retry."""
        from influxdb import exceptions

        for retry in range(self.max_tries + 1):
            try:
                self.influx.write_points(json)

                if self.write_errors:
                    _LOGGER.error("Resumed, lost %d events", self.write_errors)
                    self.write_errors = 0

                _LOGGER.debug("Wrote %d events", len(json))
                break
            except (exceptions.InfluxDBClientError, IOError) as err:
                if retry < self.max_tries:
                    time.sleep(RETRY_DELAY)
                else:
                    if not self.write_errors:
                        _LOGGER.error("Write error: %s", err)
                    self.write_errors += len(json)

    def run(self):
        """Process incoming events."""
        while not self.shutdown:
            count, json = self.get_events_json()
            if json:
                self.write_to_influxdb(json)
            for _ in range(count):
                self.queue.task_done()

    def block_till_done(self):
        """Block till all events are processed."""
        self.queue.join()
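

# Typical lifecycle, as wired up in setup() above (an illustrative sketch):
#
#   instance = InfluxThread(hass, influx, event_to_json, max_tries)
#   instance.start()          # run() begins draining the queue in batches
#   ...
#   instance.queue.put(None)  # sentinel: request shutdown
#   instance.join()           # wait for the final batch to be written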