Include channel in response to WS thread/list_datasets (#90493)
parent 642984a042
commit 976efb437b

@@ -1,6 +1,7 @@
 """Persistently store thread datasets."""
 from __future__ import annotations
 
+from contextlib import suppress
 import dataclasses
 from datetime import datetime
 from functools import cached_property

@@ -35,6 +36,15 @@ class DatasetEntry:
     created: datetime = dataclasses.field(default_factory=dt_util.utcnow)
     id: str = dataclasses.field(default_factory=ulid_util.ulid)
 
+    @property
+    def channel(self) -> int | None:
+        """Return channel as an integer."""
+        if (channel := self.dataset.get(tlv_parser.MeshcopTLVType.CHANNEL)) is None:
+            return None
+        with suppress(ValueError):
+            return int(channel, 16)
+        return None
+
     @cached_property
     def dataset(self) -> dict[tlv_parser.MeshcopTLVType, str]:
         """Return the dataset in dict format."""
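
A minimal standalone sketch (not part of the commit) of the conversion the new channel property performs; the helper name and sample values are illustrative, chosen to mirror the tests further down:

from contextlib import suppress


def parse_channel(channel_hex: str | None) -> int | None:
    # The CHANNEL TLV value is kept as a hex string, so int(value, 16) turns
    # e.g. "00000F" into 15; suppress(ValueError) maps an unparsable value
    # (such as an empty string) to None instead of raising.
    if channel_hex is None:
        return None
    with suppress(ValueError):
        return int(channel_hex, 16)
    return None


assert parse_channel("00000F") == 15  # dataset advertising channel 15
assert parse_channel("") is None      # malformed CHANNEL TLV value
assert parse_channel(None) is None    # dataset without a CHANNEL TLV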

@@ -144,6 +144,7 @@ async def ws_list_datasets(
     for dataset in store.datasets.values():
         result.append(
             {
+                "channel": dataset.channel,
                 "created": dataset.created,
                 "dataset_id": dataset.id,
                 "extended_pan_id": dataset.extended_pan_id,
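
For context, the websocket command extended here is thread/list_datasets (per the commit title). A test-style sketch of exercising it, assuming Home Assistant's standard hass_ws_client pytest fixture; the test name and message id are illustrative:

async def test_list_datasets_includes_channel(hass, hass_ws_client) -> None:
    # Sketch only: every dataset returned by thread/list_datasets should now
    # carry a "channel" key holding an int (e.g. 15) or None.
    client = await hass_ws_client(hass)

    await client.send_json({"id": 1, "type": "thread/list_datasets"})
    msg = await client.receive_json()

    assert msg["success"]
    for dataset in msg["result"]["datasets"]:
        assert "channel" in dataset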

@@ -19,6 +19,18 @@ DATASET_1_REORDERED = (
     "10445F2B5CA6F2A93A55CE570A70EFEECB0C0402A0F7F801021234"
 )
 
+DATASET_1_BAD_CHANNEL = (
+    "0E080000000000010000000035060004001FFFE0020811111111222222220708FDAD70BF"
+    "E5AA15DD051000112233445566778899AABBCCDDEEFF030E4F70656E54687265616444656D6F01"
+    "0212340410445F2B5CA6F2A93A55CE570A70EFEECB0C0402A0F7F8"
+)
+
+DATASET_1_NO_CHANNEL = (
+    "0E08000000000001000035060004001FFFE0020811111111222222220708FDAD70BF"
+    "E5AA15DD051000112233445566778899AABBCCDDEEFF030E4F70656E54687265616444656D6F01"
+    "0212340410445F2B5CA6F2A93A55CE570A70EFEECB0C0402A0F7F8"
+)
+
 
 async def test_add_invalid_dataset(hass: HomeAssistant) -> None:
     """Test adding an invalid dataset."""
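
The two new fixtures differ only around the channel TLV: DATASET_1_BAD_CHANNEL carries a channel TLV whose value cannot be parsed as a channel, while DATASET_1_NO_CHANNEL omits the channel TLV entirely, so both are expected to report channel as None in the assertions below. As a rough sketch of the assumed MeshCoP type/length/value layout (not code from this commit), the hex strings can be walked like this:

def iter_tlvs(tlv_hex: str):
    # Assumed layout: one type byte, one length byte, then `length` value bytes.
    data = bytes.fromhex(tlv_hex)
    pos = 0
    while pos < len(data):
        tlv_type = data[pos]
        length = data[pos + 1]
        value = data[pos + 2 : pos + 2 + length]
        yield tlv_type, value.hex().upper()
        pos += 2 + length


# Assumed example: type 0x00 is the channel TLV; length 3 with value "00000F"
# corresponds to channel 15, matching the assertions in the tests below.
assert dict(iter_tlvs("000300000F")) == {0x00: "00000F"}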

@@ -109,6 +121,8 @@ async def test_dataset_properties(hass: HomeAssistant) -> None:
         {"source": "Google", "tlv": DATASET_1},
         {"source": "Multipan", "tlv": DATASET_2},
         {"source": "🎅", "tlv": DATASET_3},
+        {"source": "test1", "tlv": DATASET_1_BAD_CHANNEL},
+        {"source": "test2", "tlv": DATASET_1_NO_CHANNEL},
     ]
 
     for dataset in datasets:

@@ -122,25 +136,40 @@ async def test_dataset_properties(hass: HomeAssistant) -> None:
             dataset_2 = dataset
         if dataset.source == "🎅":
             dataset_3 = dataset
+        if dataset.source == "test1":
+            dataset_4 = dataset
+        if dataset.source == "test2":
+            dataset_5 = dataset
 
     dataset = store.async_get(dataset_1.id)
     assert dataset == dataset_1
+    assert dataset.channel == 15
     assert dataset.extended_pan_id == "1111111122222222"
     assert dataset.network_name == "OpenThreadDemo"
     assert dataset.pan_id == "1234"
 
     dataset = store.async_get(dataset_2.id)
     assert dataset == dataset_2
+    assert dataset.channel == 15
     assert dataset.extended_pan_id == "1111111122222222"
     assert dataset.network_name == "HomeAssistant!"
     assert dataset.pan_id == "1234"
 
     dataset = store.async_get(dataset_3.id)
     assert dataset == dataset_3
+    assert dataset.channel == 15
     assert dataset.extended_pan_id == "1111111122222222"
     assert dataset.network_name == "~🐣🐥🐤~"
     assert dataset.pan_id == "1234"
+
+    dataset = store.async_get(dataset_4.id)
+    assert dataset == dataset_4
+    assert dataset.channel is None
+
+    dataset = store.async_get(dataset_5.id)
+    assert dataset == dataset_5
+    assert dataset.channel is None
 
 
 async def test_load_datasets(hass: HomeAssistant) -> None:
     """Make sure that we can load/save data correctly."""

@@ -153,6 +153,7 @@ async def test_list_get_dataset(
     assert msg["result"] == {
         "datasets": [
             {
+                "channel": 15,
                 "created": dataset_1.created.isoformat(),
                 "dataset_id": dataset_1.id,
                 "extended_pan_id": "1111111122222222",

@@ -162,6 +163,7 @@ async def test_list_get_dataset(
                 "source": "Google",
             },
             {
+                "channel": 15,
                 "created": dataset_2.created.isoformat(),
                 "dataset_id": dataset_2.id,
                 "extended_pan_id": "1111111122222222",

@@ -171,6 +173,7 @@ async def test_list_get_dataset(
                 "source": "Multipan",
             },
             {
+                "channel": 15,
                 "created": dataset_3.created.isoformat(),
                 "dataset_id": dataset_3.id,
                 "extended_pan_id": "1111111122222222",