Fix Feedreader Atom feeds using `updated` date (#73208)
* Feedreader: Properly support Atom feeds that use only the `updated` date format and resolve #73207.
* Revert "Feedreader: Properly support Atom feeds that use only the `updated` date format and resolve #73207."
  This reverts commit 4dbd11ee04.
* Properly support Atom feeds that use only the `updated` date format and resolve #73207.
* Revert "Properly support Atom feeds that use only the `updated` date format and resolve #73207."
  This reverts commit 14366c6a24.
* Properly support Atom feeds that use only the `updated` date format and resolve #73207.
parent b4a5abce16
commit f7bd88c952
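For context on why the change is needed, a minimal illustrative sketch of feedparser's behaviour (the inline feed is a trimmed version of the new feedreader5.xml fixture and is not part of the commit): an Atom entry that carries only an `updated` element exposes `updated_parsed` but no `published_parsed`, so the previous published-only logic never recorded a timestamp for such feeds.

import feedparser

ATOM = """<?xml version="1.0" encoding="utf-8"?>
<feed xmlns="http://www.w3.org/2005/Atom">
  <title>Example Feed</title>
  <updated>2003-12-13T18:30:02Z</updated>
  <entry>
    <title>Atom-Powered Robots Run Amok</title>
    <updated>2003-12-13T18:30:02Z</updated>
    <summary>Some text.</summary>
  </entry>
</feed>"""

entry = feedparser.parse(ATOM).entries[0]
print("published_parsed" in entry)   # False: the entry has no <published> element
print("updated_parsed" in entry)     # True: <updated> is parsed for the entry
print(entry.updated_parsed.tm_year)  # 2003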
@@ -70,6 +70,7 @@ class FeedManager:
         self._last_entry_timestamp = None
         self._last_update_successful = False
         self._has_published_parsed = False
+        self._has_updated_parsed = False
         self._event_type = EVENT_FEEDREADER
         self._feed_id = url
         hass.bus.listen_once(EVENT_HOMEASSISTANT_START, lambda _: self._update())
@@ -122,7 +123,7 @@ class FeedManager:
                )
            self._filter_entries()
            self._publish_new_entries()
-            if self._has_published_parsed:
+            if self._has_published_parsed or self._has_updated_parsed:
                self._storage.put_timestamp(
                    self._feed_id, self._last_entry_timestamp
                )
@@ -143,7 +144,7 @@ class FeedManager:

     def _update_and_fire_entry(self, entry):
         """Update last_entry_timestamp and fire entry."""
-        # Check if the entry has a published date.
+        # Check if the entry has a published or updated date.
         if "published_parsed" in entry and entry.published_parsed:
             # We are lucky, `published_parsed` data available, let's make use of
             # it to publish only new available entries since the last run
@@ -151,9 +152,20 @@ class FeedManager:
             self._last_entry_timestamp = max(
                 entry.published_parsed, self._last_entry_timestamp
             )
+        elif "updated_parsed" in entry and entry.updated_parsed:
+            # We are lucky, `updated_parsed` data available, let's make use of
+            # it to publish only new available entries since the last run
+            self._has_updated_parsed = True
+            self._last_entry_timestamp = max(
+                entry.updated_parsed, self._last_entry_timestamp
+            )
         else:
             self._has_published_parsed = False
-            _LOGGER.debug("No published_parsed info available for entry %s", entry)
+            self._has_updated_parsed = False
+            _LOGGER.debug(
+                "No published_parsed or updated_parsed info available for entry %s",
+                entry,
+            )
         entry.update({"feed_url": self._url})
         self._hass.bus.fire(self._event_type, entry)

@@ -167,9 +179,16 @@ class FeedManager:
             # Set last entry timestamp as epoch time if not available
             self._last_entry_timestamp = datetime.utcfromtimestamp(0).timetuple()
         for entry in self._feed.entries:
-            if self._firstrun or (
-                "published_parsed" in entry
-                and entry.published_parsed > self._last_entry_timestamp
+            if (
+                self._firstrun
+                or (
+                    "published_parsed" in entry
+                    and entry.published_parsed > self._last_entry_timestamp
+                )
+                or (
+                    "updated_parsed" in entry
+                    and entry.updated_parsed > self._last_entry_timestamp
+                )
             ):
                 self._update_and_fire_entry(entry)
                 new_entries = True
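A side note on the comparisons in the hunks above (illustrative, assuming feedparser's usual return types): both `published_parsed` and `updated_parsed` are `time.struct_time` values, which order like plain tuples, so `max()` and the `>` checks against the stored timestamp behave chronologically, including against the epoch fallback.

import time
from datetime import datetime

epoch = datetime.utcfromtimestamp(0).timetuple()  # the epoch fallback used above
entry_updated = time.strptime("2003-12-13T18:30:02Z", "%Y-%m-%dT%H:%M:%SZ")

assert entry_updated > epoch                       # field-by-field tuple comparison
assert max(entry_updated, epoch) == entry_updated  # max() keeps the newest timestamp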
@@ -23,6 +23,7 @@ VALID_CONFIG_1 = {feedreader.DOMAIN: {CONF_URLS: [URL]}}
 VALID_CONFIG_2 = {feedreader.DOMAIN: {CONF_URLS: [URL], CONF_SCAN_INTERVAL: 60}}
 VALID_CONFIG_3 = {feedreader.DOMAIN: {CONF_URLS: [URL], CONF_MAX_ENTRIES: 100}}
 VALID_CONFIG_4 = {feedreader.DOMAIN: {CONF_URLS: [URL], CONF_MAX_ENTRIES: 5}}
+VALID_CONFIG_5 = {feedreader.DOMAIN: {CONF_URLS: [URL], CONF_MAX_ENTRIES: 1}}


 def load_fixture_bytes(src):
@@ -56,6 +57,12 @@ def fixture_feed_three_events(hass):
     return load_fixture_bytes("feedreader3.xml")


+@pytest.fixture(name="feed_atom_event")
+def fixture_feed_atom_event(hass):
+    """Load test feed data for atom event."""
+    return load_fixture_bytes("feedreader5.xml")
+
+
 @pytest.fixture(name="events")
 async def fixture_events(hass):
     """Fixture that catches alexa events."""
@@ -98,7 +105,7 @@ async def test_setup_max_entries(hass):


 async def test_feed(hass, events, feed_one_event):
-    """Test simple feed with valid data."""
+    """Test simple rss feed with valid data."""
     with patch(
         "feedparser.http.get",
         return_value=feed_one_event,
@@ -120,6 +127,29 @@ async def test_feed(hass, events, feed_one_event):
        assert events[0].data.published_parsed.tm_min == 10


+async def test_atom_feed(hass, events, feed_atom_event):
+    """Test simple atom feed with valid data."""
+    with patch(
+        "feedparser.http.get",
+        return_value=feed_atom_event,
+    ):
+        assert await async_setup_component(hass, feedreader.DOMAIN, VALID_CONFIG_5)
+
+        hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
+        await hass.async_block_till_done()
+
+        assert len(events) == 1
+        assert events[0].data.title == "Atom-Powered Robots Run Amok"
+        assert events[0].data.description == "Some text."
+        assert events[0].data.link == "http://example.org/2003/12/13/atom03"
+        assert events[0].data.id == "urn:uuid:1225c695-cfb8-4ebb-aaaa-80da344efa6a"
+        assert events[0].data.updated_parsed.tm_year == 2003
+        assert events[0].data.updated_parsed.tm_mon == 12
+        assert events[0].data.updated_parsed.tm_mday == 13
+        assert events[0].data.updated_parsed.tm_hour == 18
+        assert events[0].data.updated_parsed.tm_min == 30
+
+
 async def test_feed_updates(hass, events, feed_one_event, feed_two_event):
     """Test feed updates."""
     side_effect = [
@@ -0,0 +1,18 @@
+<?xml version="1.0" encoding="utf-8"?>
+<feed
+  xmlns="http://www.w3.org/2005/Atom">
+  <title>Example Feed</title>
+  <link href="http://example.org/"/>
+  <updated>2003-12-13T18:30:02Z</updated>
+  <author>
+    <name>John Doe</name>
+  </author>
+  <id>urn:uuid:60a76c80-d399-11d9-b93C-0003939e0af6</id>
+  <entry>
+    <title>Atom-Powered Robots Run Amok</title>
+    <link href="http://example.org/2003/12/13/atom03"/>
+    <id>urn:uuid:1225c695-cfb8-4ebb-aaaa-80da344efa6a</id>
+    <updated>2003-12-13T18:30:02Z</updated>
+    <summary>Some text.</summary>
+  </entry>
+</feed>