Add live preview to blacklist and whitelist
All checks were successful
Test and build Docker image / docker (push) Successful in 1m58s
All checks were successful
Test and build Docker image / docker (push) Successful in 1m58s
This commit is contained in:
parent
aa8a74ba67
commit
6a3bba5b69
15 changed files with 1539 additions and 370 deletions
|
|
@ -11,6 +11,8 @@ from reader import make_reader
|
|||
|
||||
from discord_rss_bot.filter.blacklist import entry_should_be_skipped
|
||||
from discord_rss_bot.filter.blacklist import feed_has_blacklist_tags
|
||||
from discord_rss_bot.filter.evaluator import evaluate_entry_filters
|
||||
from discord_rss_bot.filter.evaluator import get_filter_values_from_reader
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from collections.abc import Iterable
|
||||
|
|
@ -203,3 +205,54 @@ def test_regex_should_be_skipped() -> None:
|
|||
)
|
||||
reader.delete_tag(feed, "regex_blacklist_author")
|
||||
assert entry_should_be_skipped(reader, first_entry[0]) is False, f"Entry should not be skipped: {first_entry[0]}"
|
||||
|
||||
|
||||
def test_whitelist_match_overrides_blacklist_match() -> None:
    """A whitelist hit should beat a blacklist hit in the final decision."""
    reader: Reader = get_reader()

    reader.add_feed(feed_url)
    feed: Feed = reader.get_feed(feed_url)
    reader.update_feeds()

    # Grab at most one entry from the feed without materialising the rest.
    candidate: Entry | None = next(iter(reader.get_entries(feed=feed)), None)
    first_entry: list[Entry] = [] if candidate is None else [candidate]

    assert len(first_entry) == 1, f"First entry should be added: {first_entry}"

    # Identical rule on both lists: the whitelist must win the tie.
    reader.set_tag(feed, "blacklist_title", "fvnnnfnfdnfdnfd")  # pyright: ignore[reportArgumentType]
    reader.set_tag(feed, "whitelist_title", "fvnnnfnfdnfdnfd")  # pyright: ignore[reportArgumentType]

    decision = evaluate_entry_filters(
        first_entry[0],
        blacklist_values=get_filter_values_from_reader(reader, feed, "blacklist"),
        whitelist_values=get_filter_values_from_reader(reader, feed, "whitelist"),
    )

    assert decision.should_send is True, "Whitelist match should override blacklist match"
    assert decision.blacklist_match is not None, "Expected a blacklist match"
    assert decision.whitelist_match is not None, "Expected a whitelist match"
    assert "whitelist overrides blacklist" in decision.reason
|
||||
|
||||
|
||||
def test_blacklist_substring_match_on_title() -> None:
    """Blacklist plain-text rules should match title substrings."""
    reader: Reader = get_reader()

    reader.add_feed(feed_url)
    feed: Feed = reader.get_feed(feed_url)
    reader.update_feeds()

    # Grab at most one entry from the feed.
    candidate: Entry | None = next(iter(reader.get_entries(feed=feed)), None)
    first_entry: list[Entry] = [] if candidate is None else [candidate]

    assert len(first_entry) == 1, f"First entry should be added: {first_entry}"

    # "vnnnfn" is only a fragment of the test entry's title, not a whole word.
    reader.set_tag(feed, "blacklist_title", "vnnnfn")  # pyright: ignore[reportArgumentType]
    assert entry_should_be_skipped(reader, first_entry[0]) is True, "Substring title match should blacklist the entry"
|
||||
|
|
|
|||
|
|
@ -37,6 +37,15 @@ def encoded_feed_url(url: str) -> str:
|
|||
return urllib.parse.quote(feed_url) if url else ""
|
||||
|
||||
|
||||
def ensure_preview_feed_exists() -> Reader:
    """Register and refresh the preview test feed, ignoring failures.

    Both steps are best-effort: the feed may already be registered and the
    refresh may fail offline; neither should abort the test run.
    """
    reader: Reader = get_reader_dependency()
    for action in (reader.add_feed, reader.update_feed):
        with contextlib.suppress(Exception):
            action(feed_url)
    return reader
|
||||
|
||||
|
||||
def test_search() -> None:
|
||||
"""Test the /search page."""
|
||||
# Remove the feed if it already exists before we run the test.
|
||||
|
|
@ -221,6 +230,221 @@ def test_get() -> None:
|
|||
assert response.status_code == 200, f"/whitelist failed: {response.text}"
|
||||
|
||||
|
||||
def test_blacklist_page_uses_live_preview_layout() -> None:
    """The /blacklist page should render the htmx-driven live preview layout."""
    ensure_preview_feed_exists()

    response: Response = client.get(url="/blacklist", params={"feed_url": encoded_feed_url(feed_url)})

    assert response.status_code == 200, f"/blacklist failed: {response.text}"
    # The page must wire the htmx preview endpoint to its target element.
    page: str = response.text
    assert 'hx-get="/blacklist_preview"' in page
    assert 'id="filter-preview"' in page
    assert "Blacklist Rules" in page
|
||||
|
||||
|
||||
def test_whitelist_page_uses_live_preview_layout() -> None:
    """The /whitelist page should render the htmx-driven live preview layout."""
    ensure_preview_feed_exists()

    response: Response = client.get(url="/whitelist", params={"feed_url": encoded_feed_url(feed_url)})

    assert response.status_code == 200, f"/whitelist failed: {response.text}"
    # The page must wire the htmx preview endpoint to its target element.
    page: str = response.text
    assert 'hx-get="/whitelist_preview"' in page
    assert 'id="filter-preview"' in page
    assert "Whitelist Rules" in page
|
||||
|
||||
|
||||
def test_blacklist_preview_does_not_persist_unsaved_rules() -> None:
    """Previewing an unsaved rule must not overwrite the saved feed tags."""
    reader: Reader = ensure_preview_feed_exists()
    reader.set_tag(feed_url, "blacklist_title", "saved-blacklist")  # pyright: ignore[reportArgumentType]

    try:
        preview_params: dict[str, str] = {
            "feed_url": feed_url,
            "blacklist_title": "fvnnnfnfdnfdnfd",
        }
        response: Response = client.get(url="/blacklist_preview", params=preview_params)

        assert response.status_code == 200, f"/blacklist_preview failed: {response.text}"
        assert "Live preview" in response.text
        # The saved tag must survive the preview request untouched.
        assert reader.get_tag(feed_url, "blacklist_title", "") == "saved-blacklist"
    finally:
        with contextlib.suppress(Exception):
            reader.delete_tag(feed_url, "blacklist_title")
|
||||
|
||||
|
||||
def test_whitelist_preview_shows_precedence_over_blacklist() -> None:
    """The preview should report that a whitelist hit overrides a blacklist hit."""
    reader: Reader = ensure_preview_feed_exists()
    # Saved blacklist rule that matches the known test entry title.
    reader.set_tag(feed_url, "blacklist_title", "fvnnnfnfdnfdnfd")  # pyright: ignore[reportArgumentType]

    try:
        preview_params: dict[str, str] = {
            "feed_url": feed_url,
            "whitelist_title": "fvnnnfnfdnfdnfd",
        }
        response: Response = client.get(url="/whitelist_preview", params=preview_params)

        assert response.status_code == 200, f"/whitelist_preview failed: {response.text}"
        assert "whitelist overrides blacklist" in response.text
        assert "Sent" in response.text
    finally:
        with contextlib.suppress(Exception):
            reader.delete_tag(feed_url, "blacklist_title")
|
||||
|
||||
|
||||
def test_blacklist_preview_uses_50_entry_limit() -> None:
    """The preview endpoint should cap its entry scan at 50 entries."""

    @dataclass(slots=True)
    class DummyContent:
        value: str

    @dataclass(slots=True)
    class DummyFeed:
        url: str
        title: str

    @dataclass(slots=True)
    class DummyEntry:
        id: str
        feed: DummyFeed
        title: str
        summary: str
        author: str
        link: str
        published: datetime | None
        content: list[DummyContent] = field(default_factory=lambda: [DummyContent("content")])

    class StubReader:
        """Reader double that records the ``limit`` passed to ``get_entries``."""

        def __init__(self) -> None:
            self.feed = DummyFeed(url="https://example.com/filter-preview.xml", title="Preview Feed")
            self.recorded_limit: int | None = None
            # 60 entries so a 50-entry limit is observable in the slice.
            self.entries: list[Entry] = [self._build_entry(index) for index in range(60)]

        def _build_entry(self, index: int) -> Entry:
            return cast(
                "Entry",
                DummyEntry(
                    id=f"entry-{index}",
                    feed=self.feed,
                    title=f"Entry {index}",
                    summary=f"Summary {index}",
                    author="Author",
                    link=f"https://example.com/entry-{index}",
                    published=datetime(2024, 1, 1, tzinfo=UTC),
                ),
            )

        def get_feed(self, _feed_url: str) -> DummyFeed:
            return self.feed

        def get_entries(self, **kwargs: object) -> list[Entry]:
            limit = kwargs.get("limit")
            self.recorded_limit = limit if isinstance(limit, int) else None
            return self.entries[:limit] if isinstance(limit, int) else self.entries

        def get_tag(self, _resource: object, _key: str, default: object = None) -> object:
            return default

    stub_reader = StubReader()
    app.dependency_overrides[get_reader_dependency] = lambda: stub_reader

    try:
        # Stub out HTML rendering so the endpoint only exercises entry selection.
        with patch("discord_rss_bot.main.create_html_for_feed", return_value="<div>Rendered</div>"):
            response: Response = client.get(
                url="/blacklist_preview",
                params={"feed_url": stub_reader.feed.url},
            )

        assert response.status_code == 200, f"/blacklist_preview failed: {response.text}"
        assert stub_reader.recorded_limit == 50, (
            f"Expected preview to request 50 entries, got {stub_reader.recorded_limit}"
        )
        assert "50 checked" in response.text
    finally:
        app.dependency_overrides = {}
|
||||
|
||||
|
||||
def test_blacklist_preview_shows_labeled_field_values_for_substring_match() -> None:
    """The preview should label each field and highlight the matching substring."""

    @dataclass(slots=True)
    class DummyContent:
        value: str

    @dataclass(slots=True)
    class DummyFeed:
        url: str
        title: str

    @dataclass(slots=True)
    class DummyEntry:
        id: str
        feed: DummyFeed
        title: str
        summary: str
        author: str
        link: str
        published: datetime | None
        content: list[DummyContent] = field(default_factory=list)

    class StubReader:
        """Reader double serving one hand-crafted entry for the preview."""

        def __init__(self) -> None:
            self.feed = DummyFeed(url="https://example.com/wow.xml", title="Warcraft Feed")
            single_entry = DummyEntry(
                id="wow-1",
                feed=self.feed,
                title="World of Warcraft",
                summary="<p>Massive MMO news update</p>",
                author="Blizzard",
                link="https://example.com/wow-1",
                published=datetime(2024, 1, 1, tzinfo=UTC),
                content=[DummyContent("<p>The expansion launches soon.</p>")],
            )
            self.entries: list[Entry] = [cast("Entry", single_entry)]

        def get_feed(self, _feed_url: str) -> DummyFeed:
            return self.feed

        def get_entries(self, **_kwargs: object) -> list[Entry]:
            return self.entries

        def get_tag(self, _resource: object, _key: str, default: object = None) -> object:
            return default

    stub_reader = StubReader()
    app.dependency_overrides[get_reader_dependency] = lambda: stub_reader

    try:
        # Stub out HTML rendering so the endpoint only exercises match display.
        with patch("discord_rss_bot.main.create_html_for_feed", return_value="<div>Rendered</div>"):
            response: Response = client.get(
                url="/blacklist_preview",
                params={
                    "feed_url": stub_reader.feed.url,
                    "blacklist_title": "orld",
                },
            )

        assert response.status_code == 200, f"/blacklist_preview failed: {response.text}"
        page: str = response.text
        assert "Skipped" in page
        assert "World of Warcraft" in page
        # Every field row must carry its label.
        assert "Title" in page
        assert "Author" in page
        assert "Description" in page
        assert "Content" in page
        assert "filter-preview__field-row" in page
        assert "filter-preview__match" in page
        # The matched substring is wrapped in a highlighted <mark> element.
        assert '<mark class="filter-preview__match filter-preview__match--danger">orld</mark>' in page
        assert "Massive MMO news update" in page
        assert "The expansion launches soon." in page
    finally:
        app.dependency_overrides = {}
|
||||
|
||||
|
||||
def test_settings_page_shows_screenshot_layout_setting() -> None:
|
||||
response: Response = client.get(url="/settings")
|
||||
assert response.status_code == 200, f"/settings failed: {response.text}"
|
||||
|
|
|
|||
|
|
@ -12,6 +12,8 @@ def test_is_word_in_text() -> None:
|
|||
assert is_word_in_text("word1,word2", "This is a sample text containing word1.") is True, msg_true
|
||||
assert is_word_in_text("word1,word2", "This is a sample text containing word2.") is True, msg_true
|
||||
assert is_word_in_text("word1,word2", "This is a sample text containing WORD1 and WORD2.") is True, msg_true
|
||||
assert is_word_in_text("orld", "World of Warcraft") is True, msg_true
|
||||
assert is_word_in_text(" orld , craft ", "World of Warcraft") is True, msg_true
|
||||
assert is_word_in_text("Alert,Forma", "Outbreak - Mutagen Mass - Rhea (Saturn)") is False, msg_false
|
||||
assert is_word_in_text("Alert,Forma", "Outbreak - Mutagen Mass - Rhea (Saturn)") is False, msg_false
|
||||
assert is_word_in_text("word1,word2", "This is a sample text containing none of the words.") is False, msg_false
|
||||
|
|
|
|||
|
|
@ -9,6 +9,8 @@ from reader import Feed
|
|||
from reader import Reader
|
||||
from reader import make_reader
|
||||
|
||||
from discord_rss_bot.filter.evaluator import evaluate_entry_filters
|
||||
from discord_rss_bot.filter.evaluator import get_filter_values_from_reader
|
||||
from discord_rss_bot.filter.whitelist import has_white_tags
|
||||
from discord_rss_bot.filter.whitelist import should_be_sent
|
||||
|
||||
|
|
@ -184,3 +186,54 @@ def test_regex_should_be_sent() -> None:
|
|||
assert should_be_sent(reader, first_entry[0]) is True, "Entry should be sent with newline-separated patterns"
|
||||
reader.delete_tag(feed, "regex_whitelist_author")
|
||||
assert should_be_sent(reader, first_entry[0]) is False, "Entry should not be sent"
|
||||
|
||||
|
||||
def test_active_whitelist_blocks_non_matching_blacklisted_entry() -> None:
    """An active whitelist should block non-matching entries even if blacklist also matches."""
    reader: Reader = get_reader()

    reader.add_feed(feed_url)
    feed: Feed = reader.get_feed(feed_url)
    reader.update_feeds()

    # Grab at most one entry from the feed.
    candidate: Entry | None = next(iter(reader.get_entries(feed=feed)), None)
    first_entry: list[Entry] = [] if candidate is None else [candidate]

    assert len(first_entry) == 1, "First entry should be added"

    # Blacklist rule matches the entry title; whitelist rule does not.
    reader.set_tag(feed, "blacklist_title", "fvnnnfnfdnfdnfd")  # pyright: ignore[reportArgumentType]
    reader.set_tag(feed, "whitelist_title", "does-not-match")  # pyright: ignore[reportArgumentType]

    decision = evaluate_entry_filters(
        first_entry[0],
        blacklist_values=get_filter_values_from_reader(reader, feed, "blacklist"),
        whitelist_values=get_filter_values_from_reader(reader, feed, "whitelist"),
    )

    assert decision.should_send is False, "Entry should be skipped when whitelist is active but does not match"
    assert decision.blacklist_match is not None, "Expected a blacklist match"
    assert decision.whitelist_match is None, "Expected whitelist to miss"
    assert "no whitelist rule matched" in decision.reason
|
||||
|
||||
|
||||
def test_whitelist_substring_match_on_title() -> None:
    """Whitelist plain-text rules should match title substrings."""
    reader: Reader = get_reader()

    reader.add_feed(feed_url)
    feed: Feed = reader.get_feed(feed_url)
    reader.update_feeds()

    # Grab at most one entry from the feed.
    candidate: Entry | None = next(iter(reader.get_entries(feed=feed)), None)
    first_entry: list[Entry] = [] if candidate is None else [candidate]

    assert len(first_entry) == 1, "First entry should be added"

    # "vnnnfn" is only a fragment of the test entry's title, not a whole word.
    reader.set_tag(feed, "whitelist_title", "vnnnfn")  # pyright: ignore[reportArgumentType]
    assert should_be_sent(reader, first_entry[0]) is True, "Substring title match should whitelist the entry"
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue