Prevent overly large event data from being stored in the database (#90747)
This is the same change as #87105, applied to events.

parent 0166cd082b
commit 0746e09256

@@ -88,6 +88,8 @@ TABLE_STATISTICS_SHORT_TERM = "statistics_short_term"
 STATISTICS_TABLES = ("statistics", "statistics_short_term")
 
 MAX_STATE_ATTRS_BYTES = 16384
+MAX_EVENT_DATA_BYTES = 32768
+
 PSQL_DIALECT = SupportedDialect.POSTGRESQL
 
 ALL_TABLES = [

@@ -327,8 +329,18 @@ class EventData(Base):
     ) -> bytes:
         """Create shared_data from an event."""
         if dialect == SupportedDialect.POSTGRESQL:
-            return json_bytes_strip_null(event.data)
-        return json_bytes(event.data)
+            bytes_result = json_bytes_strip_null(event.data)
+        bytes_result = json_bytes(event.data)
+        if len(bytes_result) > MAX_EVENT_DATA_BYTES:
+            _LOGGER.warning(
+                "Event data for %s exceed maximum size of %s bytes. "
+                "This can cause database performance issues; Event data "
+                "will not be stored",
+                event.event_type,
+                MAX_EVENT_DATA_BYTES,
+            )
+            return b"{}"
+        return bytes_result
 
     @staticmethod
     @lru_cache

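The guard above follows the same pattern as the existing MAX_STATE_ATTRS_BYTES check from #87105: the payload is JSON-encoded first and the encoded size is compared against the cap, so what gets measured is exactly what would be written to the database. A rough standalone sketch of that pattern, using the standard-library json module in place of the json_bytes helpers used in the diff (the function name here is illustrative, not part of the commit):

import json
import logging

_LOGGER = logging.getLogger(__name__)

# Cap introduced by this change: 32 KiB of JSON-encoded event data.
MAX_EVENT_DATA_BYTES = 32768


def encode_event_data(event_type: str, data: dict) -> bytes:
    """Sketch of the size guard: drop payloads whose encoded form is too large."""
    bytes_result = json.dumps(data).encode("utf-8")
    if len(bytes_result) > MAX_EVENT_DATA_BYTES:
        _LOGGER.warning(
            "Event data for %s exceeds %s bytes and will not be stored",
            event_type,
            MAX_EVENT_DATA_BYTES,
        )
        return b"{}"
    return bytes_result

On PostgreSQL the real method additionally strips NUL characters before storing (json_bytes_strip_null); that detail is omitted in this sketch.
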
@@ -70,6 +70,7 @@ from homeassistant.core import CoreState, Event, HomeAssistant, callback
 from homeassistant.helpers import entity_registry as er, recorder as recorder_helper
 from homeassistant.setup import async_setup_component, setup_component
 from homeassistant.util import dt as dt_util
+from homeassistant.util.json import json_loads
 
 from .common import (
     async_block_recorder,

@@ -825,6 +826,34 @@ def test_saving_state_with_oversized_attributes(
     assert states[1].attributes == {}
 
 
+def test_saving_event_with_oversized_data(
+    hass_recorder: Callable[..., HomeAssistant], caplog: pytest.LogCaptureFixture
+) -> None:
+    """Test saving events is limited to 32KiB of JSON encoded data."""
+    hass = hass_recorder()
+    massive_dict = {"a": "b" * 32768}
+    event_data = {"test_attr": 5, "test_attr_10": "nice"}
+    hass.bus.fire("test_event", event_data)
+    hass.bus.fire("test_event_too_big", massive_dict)
+    wait_recording_done(hass)
+    events = {}
+
+    with session_scope(hass=hass) as session:
+        for _, data, event_type in (
+            session.query(Events.event_id, EventData.shared_data, EventTypes.event_type)
+            .outerjoin(EventData, Events.data_id == EventData.data_id)
+            .outerjoin(EventTypes, Events.event_type_id == EventTypes.event_type_id)
+            .where(EventTypes.event_type.in_(["test_event", "test_event_too_big"]))
+        ):
+            events[event_type] = data
+
+    assert "test_event_too_big" in caplog.text
+
+    assert len(events) == 2
+    assert json_loads(events["test_event"]) == event_data
+    assert json_loads(events["test_event_too_big"]) == {}
+
+
 def test_recorder_setup_failure(hass: HomeAssistant) -> None:
     """Test some exceptions."""
     recorder_helper.async_initialize_recorder(hass)
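
The test fires one event under the cap and one over it, then reads both rows back and expects the oversized one to come back as an empty object. Outside the test suite, a quick way to estimate whether a payload would be dropped is to measure its encoded size the same way; a small sketch with the standard-library json module (the recorder itself encodes with orjson, so the exact byte count can differ slightly):

import json

MAX_EVENT_DATA_BYTES = 32768  # cap introduced by this change

payload = {"a": "b" * 32768}
encoded = json.dumps(payload).encode("utf-8")

# Anything over the cap is recorded as an empty JSON object ({}).
print(len(encoded), len(encoded) > MAX_EVENT_DATA_BYTES)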
|
||||||
|
|
Loading…
Reference in New Issue