Compare commits
No commits in common. "955b94456d9892f415bc07da5bae2a552d526e08" and "b19927af0f0cbf1dd2faa41246c35c1a6e438fca" have entirely different histories.
955b94456d
...
b19927af0f
24 changed files with 573 additions and 1902 deletions
|
|
@ -8,11 +8,15 @@ from discord_rss_bot.filter.blacklist import entry_should_be_skipped
|
||||||
from discord_rss_bot.filter.blacklist import feed_has_blacklist_tags
|
from discord_rss_bot.filter.blacklist import feed_has_blacklist_tags
|
||||||
from discord_rss_bot.filter.whitelist import has_white_tags
|
from discord_rss_bot.filter.whitelist import has_white_tags
|
||||||
from discord_rss_bot.filter.whitelist import should_be_sent
|
from discord_rss_bot.filter.whitelist import should_be_sent
|
||||||
|
from discord_rss_bot.settings import get_reader
|
||||||
|
|
||||||
if TYPE_CHECKING:
|
if TYPE_CHECKING:
|
||||||
from reader import Entry
|
from reader import Entry
|
||||||
from reader import Reader
|
from reader import Reader
|
||||||
|
|
||||||
|
# Our reader
|
||||||
|
reader: Reader = get_reader()
|
||||||
|
|
||||||
|
|
||||||
@lru_cache
|
@lru_cache
|
||||||
def encode_url(url_to_quote: str) -> str:
|
def encode_url(url_to_quote: str) -> str:
|
||||||
|
|
@ -30,12 +34,11 @@ def encode_url(url_to_quote: str) -> str:
|
||||||
return urllib.parse.quote(string=url_to_quote) if url_to_quote else ""
|
return urllib.parse.quote(string=url_to_quote) if url_to_quote else ""
|
||||||
|
|
||||||
|
|
||||||
def entry_is_whitelisted(entry_to_check: Entry, reader: Reader) -> bool:
|
def entry_is_whitelisted(entry_to_check: Entry) -> bool:
|
||||||
"""Check if the entry is whitelisted.
|
"""Check if the entry is whitelisted.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
entry_to_check: The feed to check.
|
entry_to_check: The feed to check.
|
||||||
reader: Custom Reader instance.
|
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
bool: True if the feed is whitelisted, False otherwise.
|
bool: True if the feed is whitelisted, False otherwise.
|
||||||
|
|
@ -44,12 +47,11 @@ def entry_is_whitelisted(entry_to_check: Entry, reader: Reader) -> bool:
|
||||||
return bool(has_white_tags(reader, entry_to_check.feed) and should_be_sent(reader, entry_to_check))
|
return bool(has_white_tags(reader, entry_to_check.feed) and should_be_sent(reader, entry_to_check))
|
||||||
|
|
||||||
|
|
||||||
def entry_is_blacklisted(entry_to_check: Entry, reader: Reader) -> bool:
|
def entry_is_blacklisted(entry_to_check: Entry) -> bool:
|
||||||
"""Check if the entry is blacklisted.
|
"""Check if the entry is blacklisted.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
entry_to_check: The feed to check.
|
entry_to_check: The feed to check.
|
||||||
reader: Custom Reader instance.
|
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
bool: True if the feed is blacklisted, False otherwise.
|
bool: True if the feed is blacklisted, False otherwise.
|
||||||
|
|
|
||||||
|
|
@ -5,18 +5,17 @@ import json
|
||||||
import logging
|
import logging
|
||||||
import re
|
import re
|
||||||
from dataclasses import dataclass
|
from dataclasses import dataclass
|
||||||
from typing import TYPE_CHECKING
|
|
||||||
|
|
||||||
from bs4 import BeautifulSoup
|
from bs4 import BeautifulSoup
|
||||||
from bs4 import Tag
|
from bs4 import Tag
|
||||||
from markdownify import markdownify
|
from markdownify import markdownify
|
||||||
|
from reader import Entry
|
||||||
|
from reader import Feed
|
||||||
|
from reader import Reader
|
||||||
|
from reader import TagNotFoundError
|
||||||
|
|
||||||
from discord_rss_bot.is_url_valid import is_url_valid
|
from discord_rss_bot.is_url_valid import is_url_valid
|
||||||
|
from discord_rss_bot.settings import get_reader
|
||||||
if TYPE_CHECKING:
|
|
||||||
from reader import Entry
|
|
||||||
from reader import Feed
|
|
||||||
from reader import Reader
|
|
||||||
|
|
||||||
logger: logging.Logger = logging.getLogger(__name__)
|
logger: logging.Logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
@ -117,18 +116,18 @@ def format_entry_html_for_discord(text: str) -> str:
|
||||||
return _restore_discord_timestamp_tags(formatted_text, replacements)
|
return _restore_discord_timestamp_tags(formatted_text, replacements)
|
||||||
|
|
||||||
|
|
||||||
def replace_tags_in_text_message(entry: Entry, reader: Reader) -> str:
|
def replace_tags_in_text_message(entry: Entry) -> str:
|
||||||
"""Replace tags in custom_message.
|
"""Replace tags in custom_message.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
entry: The entry to get the tags from.
|
entry: The entry to get the tags from.
|
||||||
reader: Custom Reader instance.
|
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
Returns the custom_message with the tags replaced.
|
Returns the custom_message with the tags replaced.
|
||||||
"""
|
"""
|
||||||
feed: Feed = entry.feed
|
feed: Feed = entry.feed
|
||||||
custom_message: str = get_custom_message(feed=feed, reader=reader)
|
custom_reader: Reader = get_reader()
|
||||||
|
custom_message: str = get_custom_message(feed=feed, custom_reader=custom_reader)
|
||||||
|
|
||||||
content = ""
|
content = ""
|
||||||
if entry.content:
|
if entry.content:
|
||||||
|
|
@ -230,18 +229,18 @@ def get_first_image(summary: str | None, content: str | None) -> str:
|
||||||
return ""
|
return ""
|
||||||
|
|
||||||
|
|
||||||
def replace_tags_in_embed(feed: Feed, entry: Entry, reader: Reader) -> CustomEmbed:
|
def replace_tags_in_embed(feed: Feed, entry: Entry) -> CustomEmbed:
|
||||||
"""Replace tags in embed.
|
"""Replace tags in embed.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
feed: The feed to get the tags from.
|
feed: The feed to get the tags from.
|
||||||
entry: The entry to get the tags from.
|
entry: The entry to get the tags from.
|
||||||
reader: Custom Reader instance.
|
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
Returns the embed with the tags replaced.
|
Returns the embed with the tags replaced.
|
||||||
"""
|
"""
|
||||||
embed: CustomEmbed = get_embed(feed=feed, reader=reader)
|
custom_reader: Reader = get_reader()
|
||||||
|
embed: CustomEmbed = get_embed(feed=feed, custom_reader=custom_reader)
|
||||||
|
|
||||||
content = ""
|
content = ""
|
||||||
if entry.content:
|
if entry.content:
|
||||||
|
|
@ -332,29 +331,31 @@ def _replace_embed_tags(embed: CustomEmbed, template: str, replace_with: str) ->
|
||||||
embed.footer_icon_url = try_to_replace(embed.footer_icon_url, template, replace_with)
|
embed.footer_icon_url = try_to_replace(embed.footer_icon_url, template, replace_with)
|
||||||
|
|
||||||
|
|
||||||
def get_custom_message(reader: Reader, feed: Feed) -> str:
|
def get_custom_message(custom_reader: Reader, feed: Feed) -> str:
|
||||||
"""Get custom_message tag from feed.
|
"""Get custom_message tag from feed.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
reader: What Reader to use.
|
custom_reader: What Reader to use.
|
||||||
feed: The feed to get the tag from.
|
feed: The feed to get the tag from.
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
Returns the contents from the custom_message tag.
|
Returns the contents from the custom_message tag.
|
||||||
"""
|
"""
|
||||||
try:
|
try:
|
||||||
custom_message: str = str(reader.get_tag(feed, "custom_message", ""))
|
custom_message: str = str(custom_reader.get_tag(feed, "custom_message"))
|
||||||
|
except TagNotFoundError:
|
||||||
|
custom_message = ""
|
||||||
except ValueError:
|
except ValueError:
|
||||||
custom_message = ""
|
custom_message = ""
|
||||||
|
|
||||||
return custom_message
|
return custom_message
|
||||||
|
|
||||||
|
|
||||||
def save_embed(reader: Reader, feed: Feed, embed: CustomEmbed) -> None:
|
def save_embed(custom_reader: Reader, feed: Feed, embed: CustomEmbed) -> None:
|
||||||
"""Set embed tag in feed.
|
"""Set embed tag in feed.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
reader: What Reader to use.
|
custom_reader: What Reader to use.
|
||||||
feed: The feed to set the tag in.
|
feed: The feed to set the tag in.
|
||||||
embed: The embed to set.
|
embed: The embed to set.
|
||||||
"""
|
"""
|
||||||
|
|
@ -370,20 +371,20 @@ def save_embed(reader: Reader, feed: Feed, embed: CustomEmbed) -> None:
|
||||||
"footer_text": embed.footer_text,
|
"footer_text": embed.footer_text,
|
||||||
"footer_icon_url": embed.footer_icon_url,
|
"footer_icon_url": embed.footer_icon_url,
|
||||||
}
|
}
|
||||||
reader.set_tag(feed, "embed", json.dumps(embed_dict)) # pyright: ignore[reportArgumentType]
|
custom_reader.set_tag(feed, "embed", json.dumps(embed_dict)) # pyright: ignore[reportArgumentType]
|
||||||
|
|
||||||
|
|
||||||
def get_embed(reader: Reader, feed: Feed) -> CustomEmbed:
|
def get_embed(custom_reader: Reader, feed: Feed) -> CustomEmbed:
|
||||||
"""Get embed tag from feed.
|
"""Get embed tag from feed.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
reader: What Reader to use.
|
custom_reader: What Reader to use.
|
||||||
feed: The feed to get the tag from.
|
feed: The feed to get the tag from.
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
Returns the contents from the embed tag.
|
Returns the contents from the embed tag.
|
||||||
"""
|
"""
|
||||||
embed = reader.get_tag(feed, "embed", "")
|
embed = custom_reader.get_tag(feed, "embed", "")
|
||||||
|
|
||||||
if embed:
|
if embed:
|
||||||
if not isinstance(embed, str):
|
if not isinstance(embed, str):
|
||||||
|
|
|
||||||
|
|
@ -23,6 +23,7 @@ from reader import FeedNotFoundError
|
||||||
from reader import Reader
|
from reader import Reader
|
||||||
from reader import ReaderError
|
from reader import ReaderError
|
||||||
from reader import StorageError
|
from reader import StorageError
|
||||||
|
from reader import TagNotFoundError
|
||||||
|
|
||||||
from discord_rss_bot.custom_message import CustomEmbed
|
from discord_rss_bot.custom_message import CustomEmbed
|
||||||
from discord_rss_bot.custom_message import get_custom_message
|
from discord_rss_bot.custom_message import get_custom_message
|
||||||
|
|
@ -36,6 +37,7 @@ from discord_rss_bot.hoyolab_api import extract_post_id_from_hoyolab_url
|
||||||
from discord_rss_bot.hoyolab_api import fetch_hoyolab_post
|
from discord_rss_bot.hoyolab_api import fetch_hoyolab_post
|
||||||
from discord_rss_bot.hoyolab_api import is_c3kay_feed
|
from discord_rss_bot.hoyolab_api import is_c3kay_feed
|
||||||
from discord_rss_bot.is_url_valid import is_url_valid
|
from discord_rss_bot.is_url_valid import is_url_valid
|
||||||
|
from discord_rss_bot.missing_tags import add_missing_tags
|
||||||
from discord_rss_bot.settings import default_custom_message
|
from discord_rss_bot.settings import default_custom_message
|
||||||
from discord_rss_bot.settings import get_reader
|
from discord_rss_bot.settings import get_reader
|
||||||
|
|
||||||
|
|
@ -96,23 +98,26 @@ def extract_domain(url: str) -> str: # noqa: PLR0911
|
||||||
return "Other"
|
return "Other"
|
||||||
|
|
||||||
|
|
||||||
def send_entry_to_discord(entry: Entry, reader: Reader) -> str | None: # noqa: C901
|
def send_entry_to_discord(entry: Entry, custom_reader: Reader | None = None) -> str | None: # noqa: C901, PLR0912
|
||||||
"""Send a single entry to Discord.
|
"""Send a single entry to Discord.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
entry: The entry to send to Discord.
|
entry: The entry to send to Discord.
|
||||||
reader: The reader to use.
|
custom_reader: The reader to use. If None, the default reader will be used.
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
str | None: The error message if there was an error, otherwise None.
|
str | None: The error message if there was an error, otherwise None.
|
||||||
"""
|
"""
|
||||||
|
# Get the default reader if we didn't get a custom one.
|
||||||
|
reader: Reader = get_reader() if custom_reader is None else custom_reader
|
||||||
|
|
||||||
# Get the webhook URL for the entry.
|
# Get the webhook URL for the entry.
|
||||||
webhook_url: str = str(reader.get_tag(entry.feed_url, "webhook", ""))
|
webhook_url: str = str(reader.get_tag(entry.feed_url, "webhook", ""))
|
||||||
if not webhook_url:
|
if not webhook_url:
|
||||||
return "No webhook URL found."
|
return "No webhook URL found."
|
||||||
|
|
||||||
# If https://discord.com/quests/<quest_id> is in the URL, send a separate message with the URL.
|
# If https://discord.com/quests/<quest_id> is in the URL, send a separate message with the URL.
|
||||||
send_discord_quest_notification(entry, webhook_url, reader=reader)
|
send_discord_quest_notification(entry, webhook_url)
|
||||||
|
|
||||||
# Check if this is a c3kay feed
|
# Check if this is a c3kay feed
|
||||||
if is_c3kay_feed(entry.feed.url):
|
if is_c3kay_feed(entry.feed.url):
|
||||||
|
|
@ -123,7 +128,7 @@ def send_entry_to_discord(entry: Entry, reader: Reader) -> str | None: # noqa:
|
||||||
post_data: dict[str, Any] | None = fetch_hoyolab_post(post_id)
|
post_data: dict[str, Any] | None = fetch_hoyolab_post(post_id)
|
||||||
if post_data:
|
if post_data:
|
||||||
webhook = create_hoyolab_webhook(webhook_url, entry, post_data)
|
webhook = create_hoyolab_webhook(webhook_url, entry, post_data)
|
||||||
execute_webhook(webhook, entry, reader=reader)
|
execute_webhook(webhook, entry)
|
||||||
return None
|
return None
|
||||||
logger.warning(
|
logger.warning(
|
||||||
"Failed to create Hoyolab webhook for feed %s, falling back to regular processing",
|
"Failed to create Hoyolab webhook for feed %s, falling back to regular processing",
|
||||||
|
|
@ -137,14 +142,17 @@ def send_entry_to_discord(entry: Entry, reader: Reader) -> str | None: # noqa:
|
||||||
# Try to get the custom message for the feed. If the user has none, we will use the default message.
|
# Try to get the custom message for the feed. If the user has none, we will use the default message.
|
||||||
# This has to be a string for some reason so don't change it to "not custom_message.get_custom_message()"
|
# This has to be a string for some reason so don't change it to "not custom_message.get_custom_message()"
|
||||||
if get_custom_message(reader, entry.feed) != "": # noqa: PLC1901
|
if get_custom_message(reader, entry.feed) != "": # noqa: PLC1901
|
||||||
webhook_message: str = replace_tags_in_text_message(entry=entry, reader=reader)
|
webhook_message: str = replace_tags_in_text_message(entry=entry)
|
||||||
|
|
||||||
if not webhook_message:
|
if not webhook_message:
|
||||||
webhook_message = "No message found."
|
webhook_message = "No message found."
|
||||||
|
|
||||||
# Create the webhook.
|
# Create the webhook.
|
||||||
try:
|
try:
|
||||||
should_send_embed = bool(reader.get_tag(entry.feed, "should_send_embed", True))
|
should_send_embed = bool(reader.get_tag(entry.feed, "should_send_embed"))
|
||||||
|
except TagNotFoundError:
|
||||||
|
logger.exception("No should_send_embed tag found for feed: %s", entry.feed.url)
|
||||||
|
should_send_embed = True
|
||||||
except StorageError:
|
except StorageError:
|
||||||
logger.exception("Error getting should_send_embed tag for feed: %s", entry.feed.url)
|
logger.exception("Error getting should_send_embed tag for feed: %s", entry.feed.url)
|
||||||
should_send_embed = True
|
should_send_embed = True
|
||||||
|
|
@ -154,15 +162,15 @@ def send_entry_to_discord(entry: Entry, reader: Reader) -> str | None: # noqa:
|
||||||
should_send_embed = False
|
should_send_embed = False
|
||||||
|
|
||||||
if should_send_embed:
|
if should_send_embed:
|
||||||
webhook = create_embed_webhook(webhook_url, entry, reader=reader)
|
webhook = create_embed_webhook(webhook_url, entry)
|
||||||
else:
|
else:
|
||||||
webhook: DiscordWebhook = DiscordWebhook(url=webhook_url, content=webhook_message, rate_limit_retry=True)
|
webhook: DiscordWebhook = DiscordWebhook(url=webhook_url, content=webhook_message, rate_limit_retry=True)
|
||||||
|
|
||||||
execute_webhook(webhook, entry, reader=reader)
|
execute_webhook(webhook, entry)
|
||||||
return None
|
return None
|
||||||
|
|
||||||
|
|
||||||
def send_discord_quest_notification(entry: Entry, webhook_url: str, reader: Reader) -> None:
|
def send_discord_quest_notification(entry: Entry, webhook_url: str) -> None:
|
||||||
"""Send a separate message to Discord if the entry is a quest notification."""
|
"""Send a separate message to Discord if the entry is a quest notification."""
|
||||||
quest_regex: re.Pattern[str] = re.compile(r"https://discord\.com/quests/\d+")
|
quest_regex: re.Pattern[str] = re.compile(r"https://discord\.com/quests/\d+")
|
||||||
|
|
||||||
|
|
@ -174,7 +182,7 @@ def send_discord_quest_notification(entry: Entry, webhook_url: str, reader: Read
|
||||||
content=quest_url,
|
content=quest_url,
|
||||||
rate_limit_retry=True,
|
rate_limit_retry=True,
|
||||||
)
|
)
|
||||||
execute_webhook(webhook, entry, reader=reader)
|
execute_webhook(webhook, entry)
|
||||||
|
|
||||||
# Iterate through the content of the entry
|
# Iterate through the content of the entry
|
||||||
for content in entry.content:
|
for content in entry.content:
|
||||||
|
|
@ -232,17 +240,12 @@ def set_title(custom_embed: CustomEmbed, discord_embed: DiscordEmbed) -> None:
|
||||||
discord_embed.set_title(embed_title) if embed_title else None
|
discord_embed.set_title(embed_title) if embed_title else None
|
||||||
|
|
||||||
|
|
||||||
def create_embed_webhook( # noqa: C901
|
def create_embed_webhook(webhook_url: str, entry: Entry) -> DiscordWebhook: # noqa: C901
|
||||||
webhook_url: str,
|
|
||||||
entry: Entry,
|
|
||||||
reader: Reader,
|
|
||||||
) -> DiscordWebhook:
|
|
||||||
"""Create a webhook with an embed.
|
"""Create a webhook with an embed.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
webhook_url (str): The webhook URL.
|
webhook_url (str): The webhook URL.
|
||||||
entry (Entry): The entry to send to Discord.
|
entry (Entry): The entry to send to Discord.
|
||||||
reader (Reader): The Reader instance to use for getting embed data.
|
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
DiscordWebhook: The webhook with the embed.
|
DiscordWebhook: The webhook with the embed.
|
||||||
|
|
@ -251,7 +254,7 @@ def create_embed_webhook( # noqa: C901
|
||||||
feed: Feed = entry.feed
|
feed: Feed = entry.feed
|
||||||
|
|
||||||
# Get the embed data from the database.
|
# Get the embed data from the database.
|
||||||
custom_embed: CustomEmbed = replace_tags_in_embed(feed=feed, entry=entry, reader=reader)
|
custom_embed: CustomEmbed = replace_tags_in_embed(feed=feed, entry=entry)
|
||||||
|
|
||||||
discord_embed: DiscordEmbed = DiscordEmbed()
|
discord_embed: DiscordEmbed = DiscordEmbed()
|
||||||
|
|
||||||
|
|
@ -313,14 +316,13 @@ def get_webhook_url(reader: Reader, entry: Entry) -> str:
|
||||||
str: The webhook URL.
|
str: The webhook URL.
|
||||||
"""
|
"""
|
||||||
try:
|
try:
|
||||||
webhook_url: str = str(reader.get_tag(entry.feed_url, "webhook", ""))
|
webhook_url: str = str(reader.get_tag(entry.feed_url, "webhook"))
|
||||||
|
except TagNotFoundError:
|
||||||
|
logger.exception("No webhook URL found for feed: %s", entry.feed.url)
|
||||||
|
return ""
|
||||||
except StorageError:
|
except StorageError:
|
||||||
logger.exception("Storage error getting webhook URL for feed: %s", entry.feed.url)
|
logger.exception("Storage error getting webhook URL for feed: %s", entry.feed.url)
|
||||||
return ""
|
return ""
|
||||||
|
|
||||||
if not webhook_url:
|
|
||||||
logger.error("No webhook URL found for feed: %s", entry.feed.url)
|
|
||||||
return ""
|
|
||||||
return webhook_url
|
return webhook_url
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -339,53 +341,53 @@ def set_entry_as_read(reader: Reader, entry: Entry) -> None:
|
||||||
logger.exception("Error setting entry to read: %s", entry.id)
|
logger.exception("Error setting entry to read: %s", entry.id)
|
||||||
|
|
||||||
|
|
||||||
def send_to_discord(reader: Reader | None = None, feed: Feed | None = None, *, do_once: bool = False) -> None: # noqa: C901, PLR0912
|
def send_to_discord(custom_reader: Reader | None = None, feed: Feed | None = None, *, do_once: bool = False) -> None: # noqa: C901, PLR0912
|
||||||
"""Send entries to Discord.
|
"""Send entries to Discord.
|
||||||
|
|
||||||
If response was not ok, we will log the error and mark the entry as unread, so it will be sent again next time.
|
If response was not ok, we will log the error and mark the entry as unread, so it will be sent again next time.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
reader: If we should use a custom reader instead of the default one.
|
custom_reader: If we should use a custom reader instead of the default one.
|
||||||
feed: The feed to send to Discord.
|
feed: The feed to send to Discord.
|
||||||
do_once: If we should only send one entry. This is used in the test.
|
do_once: If we should only send one entry. This is used in the test.
|
||||||
"""
|
"""
|
||||||
logger.info("Starting to send entries to Discord.")
|
logger.info("Starting to send entries to Discord.")
|
||||||
# Get the default reader if we didn't get a custom one.
|
# Get the default reader if we didn't get a custom one.
|
||||||
effective_reader: Reader = get_reader() if reader is None else reader
|
reader: Reader = get_reader() if custom_reader is None else custom_reader
|
||||||
|
|
||||||
# Check for new entries for every feed.
|
# Check for new entries for every feed.
|
||||||
effective_reader.update_feeds(
|
reader.update_feeds(
|
||||||
scheduled=True,
|
scheduled=True,
|
||||||
workers=os.cpu_count() or 1,
|
workers=os.cpu_count() or 1,
|
||||||
)
|
)
|
||||||
|
|
||||||
# Loop through the unread entries.
|
# Loop through the unread entries.
|
||||||
entries: Iterable[Entry] = effective_reader.get_entries(feed=feed, read=False)
|
entries: Iterable[Entry] = reader.get_entries(feed=feed, read=False)
|
||||||
for entry in entries:
|
for entry in entries:
|
||||||
set_entry_as_read(effective_reader, entry)
|
set_entry_as_read(reader, entry)
|
||||||
|
|
||||||
if entry.added < datetime.datetime.now(tz=entry.added.tzinfo) - datetime.timedelta(days=1):
|
if entry.added < datetime.datetime.now(tz=entry.added.tzinfo) - datetime.timedelta(days=1):
|
||||||
logger.info("Entry is older than 24 hours: %s from %s", entry.id, entry.feed.url)
|
logger.info("Entry is older than 24 hours: %s from %s", entry.id, entry.feed.url)
|
||||||
continue
|
continue
|
||||||
|
|
||||||
webhook_url: str = get_webhook_url(effective_reader, entry)
|
webhook_url: str = get_webhook_url(reader, entry)
|
||||||
if not webhook_url:
|
if not webhook_url:
|
||||||
logger.info("No webhook URL found for feed: %s", entry.feed.url)
|
logger.info("No webhook URL found for feed: %s", entry.feed.url)
|
||||||
continue
|
continue
|
||||||
|
|
||||||
should_send_embed: bool = should_send_embed_check(effective_reader, entry)
|
should_send_embed: bool = should_send_embed_check(reader, entry)
|
||||||
|
|
||||||
# Youtube feeds only need to send the link
|
# Youtube feeds only need to send the link
|
||||||
if is_youtube_feed(entry.feed.url):
|
if is_youtube_feed(entry.feed.url):
|
||||||
should_send_embed = False
|
should_send_embed = False
|
||||||
|
|
||||||
if should_send_embed:
|
if should_send_embed:
|
||||||
webhook = create_embed_webhook(webhook_url, entry, reader=effective_reader)
|
webhook = create_embed_webhook(webhook_url, entry)
|
||||||
else:
|
else:
|
||||||
# If the user has set the custom message to an empty string, we will use the default message, otherwise we
|
# If the user has set the custom message to an empty string, we will use the default message, otherwise we
|
||||||
# will use the custom message.
|
# will use the custom message.
|
||||||
if get_custom_message(effective_reader, entry.feed) != "": # noqa: PLC1901
|
if get_custom_message(reader, entry.feed) != "": # noqa: PLC1901
|
||||||
webhook_message = replace_tags_in_text_message(entry, reader=effective_reader)
|
webhook_message = replace_tags_in_text_message(entry)
|
||||||
else:
|
else:
|
||||||
webhook_message: str = str(default_custom_message)
|
webhook_message: str = str(default_custom_message)
|
||||||
|
|
||||||
|
|
@ -395,12 +397,12 @@ def send_to_discord(reader: Reader | None = None, feed: Feed | None = None, *, d
|
||||||
webhook: DiscordWebhook = DiscordWebhook(url=webhook_url, content=webhook_message, rate_limit_retry=True)
|
webhook: DiscordWebhook = DiscordWebhook(url=webhook_url, content=webhook_message, rate_limit_retry=True)
|
||||||
|
|
||||||
# Check if the entry is blacklisted, and if it is, we will skip it.
|
# Check if the entry is blacklisted, and if it is, we will skip it.
|
||||||
if entry_should_be_skipped(effective_reader, entry):
|
if entry_should_be_skipped(reader, entry):
|
||||||
logger.info("Entry was blacklisted: %s", entry.id)
|
logger.info("Entry was blacklisted: %s", entry.id)
|
||||||
continue
|
continue
|
||||||
|
|
||||||
# Check if the feed has a whitelist, and if it does, check if the entry is whitelisted.
|
# Check if the feed has a whitelist, and if it does, check if the entry is whitelisted.
|
||||||
if has_white_tags(effective_reader, entry.feed) and not should_be_sent(effective_reader, entry):
|
if has_white_tags(reader, entry.feed) and not should_be_sent(reader, entry):
|
||||||
logger.info("Entry was not whitelisted: %s", entry.id)
|
logger.info("Entry was not whitelisted: %s", entry.id)
|
||||||
continue
|
continue
|
||||||
|
|
||||||
|
|
@ -413,7 +415,7 @@ def send_to_discord(reader: Reader | None = None, feed: Feed | None = None, *, d
|
||||||
post_data: dict[str, Any] | None = fetch_hoyolab_post(post_id)
|
post_data: dict[str, Any] | None = fetch_hoyolab_post(post_id)
|
||||||
if post_data:
|
if post_data:
|
||||||
webhook = create_hoyolab_webhook(webhook_url, entry, post_data)
|
webhook = create_hoyolab_webhook(webhook_url, entry, post_data)
|
||||||
execute_webhook(webhook, entry, reader=effective_reader)
|
execute_webhook(webhook, entry)
|
||||||
return
|
return
|
||||||
logger.warning(
|
logger.warning(
|
||||||
"Failed to create Hoyolab webhook for feed %s, falling back to regular processing",
|
"Failed to create Hoyolab webhook for feed %s, falling back to regular processing",
|
||||||
|
|
@ -423,7 +425,7 @@ def send_to_discord(reader: Reader | None = None, feed: Feed | None = None, *, d
|
||||||
logger.warning("No entry link found for feed %s, falling back to regular processing", entry.feed.url)
|
logger.warning("No entry link found for feed %s, falling back to regular processing", entry.feed.url)
|
||||||
|
|
||||||
# Send the entry to Discord as it is not blacklisted or feed has a whitelist.
|
# Send the entry to Discord as it is not blacklisted or feed has a whitelist.
|
||||||
execute_webhook(webhook, entry, reader=effective_reader)
|
execute_webhook(webhook, entry)
|
||||||
|
|
||||||
# If we only want to send one entry, we will break the loop. This is used when testing this function.
|
# If we only want to send one entry, we will break the loop. This is used when testing this function.
|
||||||
if do_once:
|
if do_once:
|
||||||
|
|
@ -431,15 +433,16 @@ def send_to_discord(reader: Reader | None = None, feed: Feed | None = None, *, d
|
||||||
break
|
break
|
||||||
|
|
||||||
|
|
||||||
def execute_webhook(webhook: DiscordWebhook, entry: Entry, reader: Reader) -> None:
|
def execute_webhook(webhook: DiscordWebhook, entry: Entry) -> None:
|
||||||
"""Execute the webhook.
|
"""Execute the webhook.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
webhook (DiscordWebhook): The webhook to execute.
|
webhook (DiscordWebhook): The webhook to execute.
|
||||||
entry (Entry): The entry to send to Discord.
|
entry (Entry): The entry to send to Discord.
|
||||||
reader (Reader): The Reader instance to use for checking feed status.
|
|
||||||
|
|
||||||
"""
|
"""
|
||||||
|
reader: Reader = get_reader()
|
||||||
|
|
||||||
# If the feed has been paused or deleted, we will not send the entry to Discord.
|
# If the feed has been paused or deleted, we will not send the entry to Discord.
|
||||||
entry_feed: Feed = entry.feed
|
entry_feed: Feed = entry.feed
|
||||||
if entry_feed.updates_enabled is False:
|
if entry_feed.updates_enabled is False:
|
||||||
|
|
@ -490,7 +493,10 @@ def should_send_embed_check(reader: Reader, entry: Entry) -> bool:
|
||||||
return False
|
return False
|
||||||
|
|
||||||
try:
|
try:
|
||||||
should_send_embed = bool(reader.get_tag(entry.feed, "should_send_embed", True))
|
should_send_embed = bool(reader.get_tag(entry.feed, "should_send_embed"))
|
||||||
|
except TagNotFoundError:
|
||||||
|
logger.exception("No should_send_embed tag found for feed: %s", entry.feed.url)
|
||||||
|
should_send_embed = True
|
||||||
except ReaderError:
|
except ReaderError:
|
||||||
logger.exception("Error getting should_send_embed tag for feed: %s", entry.feed.url)
|
logger.exception("Error getting should_send_embed tag for feed: %s", entry.feed.url)
|
||||||
should_send_embed = True
|
should_send_embed = True
|
||||||
|
|
@ -545,7 +551,9 @@ def create_feed(reader: Reader, feed_url: str, webhook_dropdown: str) -> None:
|
||||||
reader.add_feed(clean_feed_url)
|
reader.add_feed(clean_feed_url)
|
||||||
except FeedExistsError:
|
except FeedExistsError:
|
||||||
# Add the webhook to an already added feed if it doesn't have a webhook instead of trying to create a new.
|
# Add the webhook to an already added feed if it doesn't have a webhook instead of trying to create a new.
|
||||||
if not reader.get_tag(clean_feed_url, "webhook", ""):
|
try:
|
||||||
|
reader.get_tag(clean_feed_url, "webhook")
|
||||||
|
except TagNotFoundError:
|
||||||
reader.set_tag(clean_feed_url, "webhook", webhook_url) # pyright: ignore[reportArgumentType]
|
reader.set_tag(clean_feed_url, "webhook", webhook_url) # pyright: ignore[reportArgumentType]
|
||||||
except ReaderError as e:
|
except ReaderError as e:
|
||||||
raise HTTPException(status_code=404, detail=f"Error adding feed: {e}") from e
|
raise HTTPException(status_code=404, detail=f"Error adding feed: {e}") from e
|
||||||
|
|
@ -572,3 +580,5 @@ def create_feed(reader: Reader, feed_url: str, webhook_dropdown: str) -> None:
|
||||||
|
|
||||||
# Update the full-text search index so our new feed is searchable.
|
# Update the full-text search index so our new feed is searchable.
|
||||||
reader.update_search()
|
reader.update_search()
|
||||||
|
|
||||||
|
add_missing_tags(reader)
|
||||||
|
|
|
||||||
|
|
@ -11,7 +11,7 @@ if TYPE_CHECKING:
|
||||||
from reader import Reader
|
from reader import Reader
|
||||||
|
|
||||||
|
|
||||||
def feed_has_blacklist_tags(reader: Reader, feed: Feed) -> bool:
|
def feed_has_blacklist_tags(custom_reader: Reader, feed: Feed) -> bool:
|
||||||
"""Return True if the feed has blacklist tags.
|
"""Return True if the feed has blacklist tags.
|
||||||
|
|
||||||
The following tags are checked:
|
The following tags are checked:
|
||||||
|
|
@ -25,21 +25,21 @@ def feed_has_blacklist_tags(reader: Reader, feed: Feed) -> bool:
|
||||||
- regex_blacklist_title
|
- regex_blacklist_title
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
reader: The reader.
|
custom_reader: The reader.
|
||||||
feed: The feed to check.
|
feed: The feed to check.
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
bool: If the feed has any of the tags.
|
bool: If the feed has any of the tags.
|
||||||
"""
|
"""
|
||||||
blacklist_author: str = str(reader.get_tag(feed, "blacklist_author", "")).strip()
|
blacklist_author: str = str(custom_reader.get_tag(feed, "blacklist_author", "")).strip()
|
||||||
blacklist_content: str = str(reader.get_tag(feed, "blacklist_content", "")).strip()
|
blacklist_content: str = str(custom_reader.get_tag(feed, "blacklist_content", "")).strip()
|
||||||
blacklist_summary: str = str(reader.get_tag(feed, "blacklist_summary", "")).strip()
|
blacklist_summary: str = str(custom_reader.get_tag(feed, "blacklist_summary", "")).strip()
|
||||||
blacklist_title: str = str(reader.get_tag(feed, "blacklist_title", "")).strip()
|
blacklist_title: str = str(custom_reader.get_tag(feed, "blacklist_title", "")).strip()
|
||||||
|
|
||||||
regex_blacklist_author: str = str(reader.get_tag(feed, "regex_blacklist_author", "")).strip()
|
regex_blacklist_author: str = str(custom_reader.get_tag(feed, "regex_blacklist_author", "")).strip()
|
||||||
regex_blacklist_content: str = str(reader.get_tag(feed, "regex_blacklist_content", "")).strip()
|
regex_blacklist_content: str = str(custom_reader.get_tag(feed, "regex_blacklist_content", "")).strip()
|
||||||
regex_blacklist_summary: str = str(reader.get_tag(feed, "regex_blacklist_summary", "")).strip()
|
regex_blacklist_summary: str = str(custom_reader.get_tag(feed, "regex_blacklist_summary", "")).strip()
|
||||||
regex_blacklist_title: str = str(reader.get_tag(feed, "regex_blacklist_title", "")).strip()
|
regex_blacklist_title: str = str(custom_reader.get_tag(feed, "regex_blacklist_title", "")).strip()
|
||||||
|
|
||||||
return bool(
|
return bool(
|
||||||
blacklist_title
|
blacklist_title
|
||||||
|
|
@ -53,11 +53,11 @@ def feed_has_blacklist_tags(reader: Reader, feed: Feed) -> bool:
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
def entry_should_be_skipped(reader: Reader, entry: Entry) -> bool: # noqa: PLR0911
|
def entry_should_be_skipped(custom_reader: Reader, entry: Entry) -> bool: # noqa: PLR0911
|
||||||
"""Return True if the entry is in the blacklist.
|
"""Return True if the entry is in the blacklist.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
reader: The reader.
|
custom_reader: The reader.
|
||||||
entry: The entry to check.
|
entry: The entry to check.
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
|
|
@ -65,15 +65,15 @@ def entry_should_be_skipped(reader: Reader, entry: Entry) -> bool: # noqa: PLR0
|
||||||
"""
|
"""
|
||||||
feed = entry.feed
|
feed = entry.feed
|
||||||
|
|
||||||
blacklist_title: str = str(reader.get_tag(feed, "blacklist_title", "")).strip()
|
blacklist_title: str = str(custom_reader.get_tag(feed, "blacklist_title", "")).strip()
|
||||||
blacklist_summary: str = str(reader.get_tag(feed, "blacklist_summary", "")).strip()
|
blacklist_summary: str = str(custom_reader.get_tag(feed, "blacklist_summary", "")).strip()
|
||||||
blacklist_content: str = str(reader.get_tag(feed, "blacklist_content", "")).strip()
|
blacklist_content: str = str(custom_reader.get_tag(feed, "blacklist_content", "")).strip()
|
||||||
blacklist_author: str = str(reader.get_tag(feed, "blacklist_author", "")).strip()
|
blacklist_author: str = str(custom_reader.get_tag(feed, "blacklist_author", "")).strip()
|
||||||
|
|
||||||
regex_blacklist_title: str = str(reader.get_tag(feed, "regex_blacklist_title", "")).strip()
|
regex_blacklist_title: str = str(custom_reader.get_tag(feed, "regex_blacklist_title", "")).strip()
|
||||||
regex_blacklist_summary: str = str(reader.get_tag(feed, "regex_blacklist_summary", "")).strip()
|
regex_blacklist_summary: str = str(custom_reader.get_tag(feed, "regex_blacklist_summary", "")).strip()
|
||||||
regex_blacklist_content: str = str(reader.get_tag(feed, "regex_blacklist_content", "")).strip()
|
regex_blacklist_content: str = str(custom_reader.get_tag(feed, "regex_blacklist_content", "")).strip()
|
||||||
regex_blacklist_author: str = str(reader.get_tag(feed, "regex_blacklist_author", "")).strip()
|
regex_blacklist_author: str = str(custom_reader.get_tag(feed, "regex_blacklist_author", "")).strip()
|
||||||
# TODO(TheLovinator): Also add support for entry_text and more.
|
# TODO(TheLovinator): Also add support for entry_text and more.
|
||||||
|
|
||||||
# Check regular blacklist
|
# Check regular blacklist
|
||||||
|
|
|
||||||
|
|
@ -11,7 +11,7 @@ if TYPE_CHECKING:
|
||||||
from reader import Reader
|
from reader import Reader
|
||||||
|
|
||||||
|
|
||||||
def has_white_tags(reader: Reader, feed: Feed) -> bool:
|
def has_white_tags(custom_reader: Reader, feed: Feed) -> bool:
|
||||||
"""Return True if the feed has whitelist tags.
|
"""Return True if the feed has whitelist tags.
|
||||||
|
|
||||||
The following tags are checked:
|
The following tags are checked:
|
||||||
|
|
@ -25,21 +25,21 @@ def has_white_tags(reader: Reader, feed: Feed) -> bool:
|
||||||
- whitelist_title
|
- whitelist_title
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
reader: The reader.
|
custom_reader: The reader.
|
||||||
feed: The feed to check.
|
feed: The feed to check.
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
bool: If the feed has any of the tags.
|
bool: If the feed has any of the tags.
|
||||||
"""
|
"""
|
||||||
whitelist_title: str = str(reader.get_tag(feed, "whitelist_title", "")).strip()
|
whitelist_title: str = str(custom_reader.get_tag(feed, "whitelist_title", "")).strip()
|
||||||
whitelist_summary: str = str(reader.get_tag(feed, "whitelist_summary", "")).strip()
|
whitelist_summary: str = str(custom_reader.get_tag(feed, "whitelist_summary", "")).strip()
|
||||||
whitelist_content: str = str(reader.get_tag(feed, "whitelist_content", "")).strip()
|
whitelist_content: str = str(custom_reader.get_tag(feed, "whitelist_content", "")).strip()
|
||||||
whitelist_author: str = str(reader.get_tag(feed, "whitelist_author", "")).strip()
|
whitelist_author: str = str(custom_reader.get_tag(feed, "whitelist_author", "")).strip()
|
||||||
|
|
||||||
regex_whitelist_title: str = str(reader.get_tag(feed, "regex_whitelist_title", "")).strip()
|
regex_whitelist_title: str = str(custom_reader.get_tag(feed, "regex_whitelist_title", "")).strip()
|
||||||
regex_whitelist_summary: str = str(reader.get_tag(feed, "regex_whitelist_summary", "")).strip()
|
regex_whitelist_summary: str = str(custom_reader.get_tag(feed, "regex_whitelist_summary", "")).strip()
|
||||||
regex_whitelist_content: str = str(reader.get_tag(feed, "regex_whitelist_content", "")).strip()
|
regex_whitelist_content: str = str(custom_reader.get_tag(feed, "regex_whitelist_content", "")).strip()
|
||||||
regex_whitelist_author: str = str(reader.get_tag(feed, "regex_whitelist_author", "")).strip()
|
regex_whitelist_author: str = str(custom_reader.get_tag(feed, "regex_whitelist_author", "")).strip()
|
||||||
|
|
||||||
return bool(
|
return bool(
|
||||||
whitelist_title
|
whitelist_title
|
||||||
|
|
@ -53,11 +53,11 @@ def has_white_tags(reader: Reader, feed: Feed) -> bool:
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
def should_be_sent(reader: Reader, entry: Entry) -> bool: # noqa: PLR0911
|
def should_be_sent(custom_reader: Reader, entry: Entry) -> bool: # noqa: PLR0911
|
||||||
"""Return True if the entry is in the whitelist.
|
"""Return True if the entry is in the whitelist.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
reader: The reader.
|
custom_reader: The reader.
|
||||||
entry: The entry to check.
|
entry: The entry to check.
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
|
|
@ -65,16 +65,16 @@ def should_be_sent(reader: Reader, entry: Entry) -> bool: # noqa: PLR0911
|
||||||
"""
|
"""
|
||||||
feed: Feed = entry.feed
|
feed: Feed = entry.feed
|
||||||
# Regular whitelist tags
|
# Regular whitelist tags
|
||||||
whitelist_title: str = str(reader.get_tag(feed, "whitelist_title", "")).strip()
|
whitelist_title: str = str(custom_reader.get_tag(feed, "whitelist_title", "")).strip()
|
||||||
whitelist_summary: str = str(reader.get_tag(feed, "whitelist_summary", "")).strip()
|
whitelist_summary: str = str(custom_reader.get_tag(feed, "whitelist_summary", "")).strip()
|
||||||
whitelist_content: str = str(reader.get_tag(feed, "whitelist_content", "")).strip()
|
whitelist_content: str = str(custom_reader.get_tag(feed, "whitelist_content", "")).strip()
|
||||||
whitelist_author: str = str(reader.get_tag(feed, "whitelist_author", "")).strip()
|
whitelist_author: str = str(custom_reader.get_tag(feed, "whitelist_author", "")).strip()
|
||||||
|
|
||||||
# Regex whitelist tags
|
# Regex whitelist tags
|
||||||
regex_whitelist_title: str = str(reader.get_tag(feed, "regex_whitelist_title", "")).strip()
|
regex_whitelist_title: str = str(custom_reader.get_tag(feed, "regex_whitelist_title", "")).strip()
|
||||||
regex_whitelist_summary: str = str(reader.get_tag(feed, "regex_whitelist_summary", "")).strip()
|
regex_whitelist_summary: str = str(custom_reader.get_tag(feed, "regex_whitelist_summary", "")).strip()
|
||||||
regex_whitelist_content: str = str(reader.get_tag(feed, "regex_whitelist_content", "")).strip()
|
regex_whitelist_content: str = str(custom_reader.get_tag(feed, "regex_whitelist_content", "")).strip()
|
||||||
regex_whitelist_author: str = str(reader.get_tag(feed, "regex_whitelist_author", "")).strip()
|
regex_whitelist_author: str = str(custom_reader.get_tag(feed, "regex_whitelist_author", "")).strip()
|
||||||
|
|
||||||
# Check regular whitelist
|
# Check regular whitelist
|
||||||
if entry.title and whitelist_title and is_word_in_text(whitelist_title, entry.title):
|
if entry.title and whitelist_title and is_word_in_text(whitelist_title, entry.title):
|
||||||
|
|
|
||||||
|
|
@ -30,6 +30,8 @@ from pathlib import Path
|
||||||
from typing import TYPE_CHECKING
|
from typing import TYPE_CHECKING
|
||||||
from typing import Any
|
from typing import Any
|
||||||
|
|
||||||
|
from reader import TagNotFoundError
|
||||||
|
|
||||||
if TYPE_CHECKING:
|
if TYPE_CHECKING:
|
||||||
from reader import Reader
|
from reader import Reader
|
||||||
|
|
||||||
|
|
@ -174,15 +176,21 @@ def export_state(reader: Reader, backup_path: Path) -> None:
|
||||||
logger.exception("Failed to read tag '%s' for feed '%s' during state export", tag, feed.url)
|
logger.exception("Failed to read tag '%s' for feed '%s' during state export", tag, feed.url)
|
||||||
feeds_state.append(feed_data)
|
feeds_state.append(feed_data)
|
||||||
|
|
||||||
|
try:
|
||||||
webhooks: list[str | int | float | bool | dict[str, Any] | list[Any] | None] = list(
|
webhooks: list[str | int | float | bool | dict[str, Any] | list[Any] | None] = list(
|
||||||
reader.get_tag((), "webhooks", []),
|
reader.get_tag((), "webhooks", []),
|
||||||
)
|
)
|
||||||
|
except TagNotFoundError:
|
||||||
|
webhooks = []
|
||||||
|
|
||||||
# Export global update interval if set
|
# Export global update interval if set
|
||||||
global_update_interval: dict[str, Any] | None = None
|
global_update_interval: dict[str, Any] | None = None
|
||||||
|
try:
|
||||||
global_update_config = reader.get_tag((), ".reader.update", None)
|
global_update_config = reader.get_tag((), ".reader.update", None)
|
||||||
if isinstance(global_update_config, dict):
|
if isinstance(global_update_config, dict):
|
||||||
global_update_interval = global_update_config
|
global_update_interval = global_update_config
|
||||||
|
except TagNotFoundError:
|
||||||
|
pass
|
||||||
|
|
||||||
state: dict = {"feeds": feeds_state, "webhooks": webhooks}
|
state: dict = {"feeds": feeds_state, "webhooks": webhooks}
|
||||||
if global_update_interval is not None:
|
if global_update_interval is not None:
|
||||||
|
|
|
||||||
|
|
@ -19,7 +19,6 @@ import httpx
|
||||||
import sentry_sdk
|
import sentry_sdk
|
||||||
import uvicorn
|
import uvicorn
|
||||||
from apscheduler.schedulers.asyncio import AsyncIOScheduler
|
from apscheduler.schedulers.asyncio import AsyncIOScheduler
|
||||||
from fastapi import Depends
|
|
||||||
from fastapi import FastAPI
|
from fastapi import FastAPI
|
||||||
from fastapi import Form
|
from fastapi import Form
|
||||||
from fastapi import HTTPException
|
from fastapi import HTTPException
|
||||||
|
|
@ -54,7 +53,7 @@ from discord_rss_bot.feeds import send_entry_to_discord
|
||||||
from discord_rss_bot.feeds import send_to_discord
|
from discord_rss_bot.feeds import send_to_discord
|
||||||
from discord_rss_bot.git_backup import commit_state_change
|
from discord_rss_bot.git_backup import commit_state_change
|
||||||
from discord_rss_bot.git_backup import get_backup_path
|
from discord_rss_bot.git_backup import get_backup_path
|
||||||
from discord_rss_bot.is_url_valid import is_url_valid
|
from discord_rss_bot.missing_tags import add_missing_tags
|
||||||
from discord_rss_bot.search import create_search_context
|
from discord_rss_bot.search import create_search_context
|
||||||
from discord_rss_bot.settings import get_reader
|
from discord_rss_bot.settings import get_reader
|
||||||
|
|
||||||
|
|
@ -101,16 +100,7 @@ LOGGING_CONFIG: dict[str, Any] = {
|
||||||
logging.config.dictConfig(LOGGING_CONFIG)
|
logging.config.dictConfig(LOGGING_CONFIG)
|
||||||
|
|
||||||
logger: logging.Logger = logging.getLogger(__name__)
|
logger: logging.Logger = logging.getLogger(__name__)
|
||||||
|
reader: Reader = get_reader()
|
||||||
|
|
||||||
def get_reader_dependency() -> Reader:
|
|
||||||
"""Provide the app Reader instance as a FastAPI dependency.
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
Reader: The shared Reader instance.
|
|
||||||
"""
|
|
||||||
return get_reader()
|
|
||||||
|
|
||||||
|
|
||||||
# Time constants for relative time formatting
|
# Time constants for relative time formatting
|
||||||
SECONDS_PER_MINUTE = 60
|
SECONDS_PER_MINUTE = 60
|
||||||
|
|
@ -156,7 +146,7 @@ def relative_time(dt: datetime | None) -> str:
|
||||||
@asynccontextmanager
|
@asynccontextmanager
|
||||||
async def lifespan(app: FastAPI) -> AsyncGenerator[None]:
|
async def lifespan(app: FastAPI) -> AsyncGenerator[None]:
|
||||||
"""Lifespan function for the FastAPI app."""
|
"""Lifespan function for the FastAPI app."""
|
||||||
reader: Reader = get_reader()
|
add_missing_tags(reader)
|
||||||
scheduler: AsyncIOScheduler = AsyncIOScheduler(timezone=UTC)
|
scheduler: AsyncIOScheduler = AsyncIOScheduler(timezone=UTC)
|
||||||
scheduler.add_job(
|
scheduler.add_job(
|
||||||
func=send_to_discord,
|
func=send_to_discord,
|
||||||
|
|
@ -180,6 +170,8 @@ templates: Jinja2Templates = Jinja2Templates(directory="discord_rss_bot/template
|
||||||
|
|
||||||
# Add the filters to the Jinja2 environment so they can be used in html templates.
|
# Add the filters to the Jinja2 environment so they can be used in html templates.
|
||||||
templates.env.filters["encode_url"] = lambda url: urllib.parse.quote(url) if url else ""
|
templates.env.filters["encode_url"] = lambda url: urllib.parse.quote(url) if url else ""
|
||||||
|
templates.env.filters["entry_is_whitelisted"] = entry_is_whitelisted
|
||||||
|
templates.env.filters["entry_is_blacklisted"] = entry_is_blacklisted
|
||||||
templates.env.filters["discord_markdown"] = markdownify
|
templates.env.filters["discord_markdown"] = markdownify
|
||||||
templates.env.filters["relative_time"] = relative_time
|
templates.env.filters["relative_time"] = relative_time
|
||||||
templates.env.globals["get_backup_path"] = get_backup_path
|
templates.env.globals["get_backup_path"] = get_backup_path
|
||||||
|
|
@ -189,14 +181,12 @@ templates.env.globals["get_backup_path"] = get_backup_path
|
||||||
async def post_add_webhook(
|
async def post_add_webhook(
|
||||||
webhook_name: Annotated[str, Form()],
|
webhook_name: Annotated[str, Form()],
|
||||||
webhook_url: Annotated[str, Form()],
|
webhook_url: Annotated[str, Form()],
|
||||||
reader: Annotated[Reader, Depends(get_reader_dependency)],
|
|
||||||
) -> RedirectResponse:
|
) -> RedirectResponse:
|
||||||
"""Add a feed to the database.
|
"""Add a feed to the database.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
webhook_name: The name of the webhook.
|
webhook_name: The name of the webhook.
|
||||||
webhook_url: The url of the webhook.
|
webhook_url: The url of the webhook.
|
||||||
reader: The Reader instance.
|
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
RedirectResponse: Redirect to the index page.
|
RedirectResponse: Redirect to the index page.
|
||||||
|
|
@ -229,15 +219,11 @@ async def post_add_webhook(
|
||||||
|
|
||||||
|
|
||||||
@app.post("/delete_webhook")
|
@app.post("/delete_webhook")
|
||||||
async def post_delete_webhook(
|
async def post_delete_webhook(webhook_url: Annotated[str, Form()]) -> RedirectResponse:
|
||||||
webhook_url: Annotated[str, Form()],
|
|
||||||
reader: Annotated[Reader, Depends(get_reader_dependency)],
|
|
||||||
) -> RedirectResponse:
|
|
||||||
"""Delete a webhook from the database.
|
"""Delete a webhook from the database.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
webhook_url: The url of the webhook.
|
webhook_url: The url of the webhook.
|
||||||
reader: The Reader instance.
|
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
RedirectResponse: Redirect to the index page.
|
RedirectResponse: Redirect to the index page.
|
||||||
|
|
@ -280,14 +266,12 @@ async def post_delete_webhook(
|
||||||
async def post_create_feed(
|
async def post_create_feed(
|
||||||
feed_url: Annotated[str, Form()],
|
feed_url: Annotated[str, Form()],
|
||||||
webhook_dropdown: Annotated[str, Form()],
|
webhook_dropdown: Annotated[str, Form()],
|
||||||
reader: Annotated[Reader, Depends(get_reader_dependency)],
|
|
||||||
) -> RedirectResponse:
|
) -> RedirectResponse:
|
||||||
"""Add a feed to the database.
|
"""Add a feed to the database.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
feed_url: The feed to add.
|
feed_url: The feed to add.
|
||||||
webhook_dropdown: The webhook to use.
|
webhook_dropdown: The webhook to use.
|
||||||
reader: The Reader instance.
|
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
RedirectResponse: Redirect to the feed page.
|
RedirectResponse: Redirect to the feed page.
|
||||||
|
|
@ -299,15 +283,11 @@ async def post_create_feed(
|
||||||
|
|
||||||
|
|
||||||
@app.post("/pause")
|
@app.post("/pause")
|
||||||
async def post_pause_feed(
|
async def post_pause_feed(feed_url: Annotated[str, Form()]) -> RedirectResponse:
|
||||||
feed_url: Annotated[str, Form()],
|
|
||||||
reader: Annotated[Reader, Depends(get_reader_dependency)],
|
|
||||||
) -> RedirectResponse:
|
|
||||||
"""Pause a feed.
|
"""Pause a feed.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
feed_url: The feed to pause.
|
feed_url: The feed to pause.
|
||||||
reader: The Reader instance.
|
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
RedirectResponse: Redirect to the feed page.
|
RedirectResponse: Redirect to the feed page.
|
||||||
|
|
@ -318,15 +298,11 @@ async def post_pause_feed(
|
||||||
|
|
||||||
|
|
||||||
@app.post("/unpause")
|
@app.post("/unpause")
|
||||||
async def post_unpause_feed(
|
async def post_unpause_feed(feed_url: Annotated[str, Form()]) -> RedirectResponse:
|
||||||
feed_url: Annotated[str, Form()],
|
|
||||||
reader: Annotated[Reader, Depends(get_reader_dependency)],
|
|
||||||
) -> RedirectResponse:
|
|
||||||
"""Unpause a feed.
|
"""Unpause a feed.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
feed_url: The Feed to unpause.
|
feed_url: The Feed to unpause.
|
||||||
reader: The Reader instance.
|
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
RedirectResponse: Redirect to the feed page.
|
RedirectResponse: Redirect to the feed page.
|
||||||
|
|
@ -338,7 +314,6 @@ async def post_unpause_feed(
|
||||||
|
|
||||||
@app.post("/whitelist")
|
@app.post("/whitelist")
|
||||||
async def post_set_whitelist(
|
async def post_set_whitelist(
|
||||||
reader: Annotated[Reader, Depends(get_reader_dependency)],
|
|
||||||
whitelist_title: Annotated[str, Form()] = "",
|
whitelist_title: Annotated[str, Form()] = "",
|
||||||
whitelist_summary: Annotated[str, Form()] = "",
|
whitelist_summary: Annotated[str, Form()] = "",
|
||||||
whitelist_content: Annotated[str, Form()] = "",
|
whitelist_content: Annotated[str, Form()] = "",
|
||||||
|
|
@ -361,7 +336,6 @@ async def post_set_whitelist(
|
||||||
regex_whitelist_content: Whitelisted regex for when checking the content.
|
regex_whitelist_content: Whitelisted regex for when checking the content.
|
||||||
regex_whitelist_author: Whitelisted regex for when checking the author.
|
regex_whitelist_author: Whitelisted regex for when checking the author.
|
||||||
feed_url: The feed we should set the whitelist for.
|
feed_url: The feed we should set the whitelist for.
|
||||||
reader: The Reader instance.
|
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
RedirectResponse: Redirect to the feed page.
|
RedirectResponse: Redirect to the feed page.
|
||||||
|
|
@ -382,17 +356,12 @@ async def post_set_whitelist(
|
||||||
|
|
||||||
|
|
||||||
@app.get("/whitelist", response_class=HTMLResponse)
|
@app.get("/whitelist", response_class=HTMLResponse)
|
||||||
async def get_whitelist(
|
async def get_whitelist(feed_url: str, request: Request):
|
||||||
feed_url: str,
|
|
||||||
request: Request,
|
|
||||||
reader: Annotated[Reader, Depends(get_reader_dependency)],
|
|
||||||
):
|
|
||||||
"""Get the whitelist.
|
"""Get the whitelist.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
feed_url: What feed we should get the whitelist for.
|
feed_url: What feed we should get the whitelist for.
|
||||||
request: The request object.
|
request: The request object.
|
||||||
reader: The Reader instance.
|
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
HTMLResponse: The whitelist page.
|
HTMLResponse: The whitelist page.
|
||||||
|
|
@ -426,7 +395,6 @@ async def get_whitelist(
|
||||||
|
|
||||||
@app.post("/blacklist")
|
@app.post("/blacklist")
|
||||||
async def post_set_blacklist(
|
async def post_set_blacklist(
|
||||||
reader: Annotated[Reader, Depends(get_reader_dependency)],
|
|
||||||
blacklist_title: Annotated[str, Form()] = "",
|
blacklist_title: Annotated[str, Form()] = "",
|
||||||
blacklist_summary: Annotated[str, Form()] = "",
|
blacklist_summary: Annotated[str, Form()] = "",
|
||||||
blacklist_content: Annotated[str, Form()] = "",
|
blacklist_content: Annotated[str, Form()] = "",
|
||||||
|
|
@ -452,7 +420,6 @@ async def post_set_blacklist(
|
||||||
regex_blacklist_content: Blacklisted regex for when checking the content.
|
regex_blacklist_content: Blacklisted regex for when checking the content.
|
||||||
regex_blacklist_author: Blacklisted regex for when checking the author.
|
regex_blacklist_author: Blacklisted regex for when checking the author.
|
||||||
feed_url: What feed we should set the blacklist for.
|
feed_url: What feed we should set the blacklist for.
|
||||||
reader: The Reader instance.
|
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
RedirectResponse: Redirect to the feed page.
|
RedirectResponse: Redirect to the feed page.
|
||||||
|
|
@ -471,17 +438,12 @@ async def post_set_blacklist(
|
||||||
|
|
||||||
|
|
||||||
@app.get("/blacklist", response_class=HTMLResponse)
|
@app.get("/blacklist", response_class=HTMLResponse)
|
||||||
async def get_blacklist(
|
async def get_blacklist(feed_url: str, request: Request):
|
||||||
feed_url: str,
|
|
||||||
request: Request,
|
|
||||||
reader: Annotated[Reader, Depends(get_reader_dependency)],
|
|
||||||
):
|
|
||||||
"""Get the blacklist.
|
"""Get the blacklist.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
feed_url: What feed we should get the blacklist for.
|
feed_url: What feed we should get the blacklist for.
|
||||||
request: The request object.
|
request: The request object.
|
||||||
reader: The Reader instance.
|
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
HTMLResponse: The blacklist page.
|
HTMLResponse: The blacklist page.
|
||||||
|
|
@ -515,7 +477,6 @@ async def get_blacklist(
|
||||||
@app.post("/custom")
|
@app.post("/custom")
|
||||||
async def post_set_custom(
|
async def post_set_custom(
|
||||||
feed_url: Annotated[str, Form()],
|
feed_url: Annotated[str, Form()],
|
||||||
reader: Annotated[Reader, Depends(get_reader_dependency)],
|
|
||||||
custom_message: Annotated[str, Form()] = "",
|
custom_message: Annotated[str, Form()] = "",
|
||||||
) -> RedirectResponse:
|
) -> RedirectResponse:
|
||||||
"""Set the custom message, this is used when sending the message.
|
"""Set the custom message, this is used when sending the message.
|
||||||
|
|
@ -523,7 +484,6 @@ async def post_set_custom(
|
||||||
Args:
|
Args:
|
||||||
custom_message: The custom message.
|
custom_message: The custom message.
|
||||||
feed_url: The feed we should set the custom message for.
|
feed_url: The feed we should set the custom message for.
|
||||||
reader: The Reader instance.
|
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
RedirectResponse: Redirect to the feed page.
|
RedirectResponse: Redirect to the feed page.
|
||||||
|
|
@ -545,17 +505,12 @@ async def post_set_custom(
|
||||||
|
|
||||||
|
|
||||||
@app.get("/custom", response_class=HTMLResponse)
|
@app.get("/custom", response_class=HTMLResponse)
|
||||||
async def get_custom(
|
async def get_custom(feed_url: str, request: Request):
|
||||||
feed_url: str,
|
|
||||||
request: Request,
|
|
||||||
reader: Annotated[Reader, Depends(get_reader_dependency)],
|
|
||||||
):
|
|
||||||
"""Get the custom message. This is used when sending the message to Discord.
|
"""Get the custom message. This is used when sending the message to Discord.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
feed_url: What feed we should get the custom message for.
|
feed_url: What feed we should get the custom message for.
|
||||||
request: The request object.
|
request: The request object.
|
||||||
reader: The Reader instance.
|
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
HTMLResponse: The custom message page.
|
HTMLResponse: The custom message page.
|
||||||
|
|
@ -576,17 +531,12 @@ async def get_custom(
|
||||||
|
|
||||||
|
|
||||||
@app.get("/embed", response_class=HTMLResponse)
|
@app.get("/embed", response_class=HTMLResponse)
|
||||||
async def get_embed_page(
|
async def get_embed_page(feed_url: str, request: Request):
|
||||||
feed_url: str,
|
|
||||||
request: Request,
|
|
||||||
reader: Annotated[Reader, Depends(get_reader_dependency)],
|
|
||||||
):
|
|
||||||
"""Get the custom message. This is used when sending the message to Discord.
|
"""Get the custom message. This is used when sending the message to Discord.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
feed_url: What feed we should get the custom message for.
|
feed_url: What feed we should get the custom message for.
|
||||||
request: The request object.
|
request: The request object.
|
||||||
reader: The Reader instance.
|
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
HTMLResponse: The embed page.
|
HTMLResponse: The embed page.
|
||||||
|
|
@ -622,7 +572,6 @@ async def get_embed_page(
|
||||||
@app.post("/embed", response_class=HTMLResponse)
|
@app.post("/embed", response_class=HTMLResponse)
|
||||||
async def post_embed(
|
async def post_embed(
|
||||||
feed_url: Annotated[str, Form()],
|
feed_url: Annotated[str, Form()],
|
||||||
reader: Annotated[Reader, Depends(get_reader_dependency)],
|
|
||||||
title: Annotated[str, Form()] = "",
|
title: Annotated[str, Form()] = "",
|
||||||
description: Annotated[str, Form()] = "",
|
description: Annotated[str, Form()] = "",
|
||||||
color: Annotated[str, Form()] = "",
|
color: Annotated[str, Form()] = "",
|
||||||
|
|
@ -648,7 +597,7 @@ async def post_embed(
|
||||||
author_icon_url: The author icon url of the embed.
|
author_icon_url: The author icon url of the embed.
|
||||||
footer_text: The footer text of the embed.
|
footer_text: The footer text of the embed.
|
||||||
footer_icon_url: The footer icon url of the embed.
|
footer_icon_url: The footer icon url of the embed.
|
||||||
reader: The Reader instance.
|
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
RedirectResponse: Redirect to the embed page.
|
RedirectResponse: Redirect to the embed page.
|
||||||
|
|
@ -676,15 +625,11 @@ async def post_embed(
|
||||||
|
|
||||||
|
|
||||||
@app.post("/use_embed")
|
@app.post("/use_embed")
|
||||||
async def post_use_embed(
|
async def post_use_embed(feed_url: Annotated[str, Form()]) -> RedirectResponse:
|
||||||
feed_url: Annotated[str, Form()],
|
|
||||||
reader: Annotated[Reader, Depends(get_reader_dependency)],
|
|
||||||
) -> RedirectResponse:
|
|
||||||
"""Use embed instead of text.
|
"""Use embed instead of text.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
feed_url: The feed to change.
|
feed_url: The feed to change.
|
||||||
reader: The Reader instance.
|
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
RedirectResponse: Redirect to the feed page.
|
RedirectResponse: Redirect to the feed page.
|
||||||
|
|
@ -696,15 +641,11 @@ async def post_use_embed(
|
||||||
|
|
||||||
|
|
||||||
@app.post("/use_text")
|
@app.post("/use_text")
|
||||||
async def post_use_text(
|
async def post_use_text(feed_url: Annotated[str, Form()]) -> RedirectResponse:
|
||||||
feed_url: Annotated[str, Form()],
|
|
||||||
reader: Annotated[Reader, Depends(get_reader_dependency)],
|
|
||||||
) -> RedirectResponse:
|
|
||||||
"""Use text instead of embed.
|
"""Use text instead of embed.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
feed_url: The feed to change.
|
feed_url: The feed to change.
|
||||||
reader: The Reader instance.
|
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
RedirectResponse: Redirect to the feed page.
|
RedirectResponse: Redirect to the feed page.
|
||||||
|
|
@ -718,7 +659,6 @@ async def post_use_text(
|
||||||
@app.post("/set_update_interval")
|
@app.post("/set_update_interval")
|
||||||
async def post_set_update_interval(
|
async def post_set_update_interval(
|
||||||
feed_url: Annotated[str, Form()],
|
feed_url: Annotated[str, Form()],
|
||||||
reader: Annotated[Reader, Depends(get_reader_dependency)],
|
|
||||||
interval_minutes: Annotated[int | None, Form()] = None,
|
interval_minutes: Annotated[int | None, Form()] = None,
|
||||||
redirect_to: Annotated[str, Form()] = "",
|
redirect_to: Annotated[str, Form()] = "",
|
||||||
) -> RedirectResponse:
|
) -> RedirectResponse:
|
||||||
|
|
@ -728,7 +668,6 @@ async def post_set_update_interval(
|
||||||
feed_url: The feed to change.
|
feed_url: The feed to change.
|
||||||
interval_minutes: The update interval in minutes (None to reset to global default).
|
interval_minutes: The update interval in minutes (None to reset to global default).
|
||||||
redirect_to: Optional redirect URL (defaults to feed page).
|
redirect_to: Optional redirect URL (defaults to feed page).
|
||||||
reader: The Reader instance.
|
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
RedirectResponse: Redirect to the specified page or feed page.
|
RedirectResponse: Redirect to the specified page or feed page.
|
||||||
|
|
@ -764,14 +703,12 @@ async def post_set_update_interval(
|
||||||
async def post_change_feed_url(
|
async def post_change_feed_url(
|
||||||
old_feed_url: Annotated[str, Form()],
|
old_feed_url: Annotated[str, Form()],
|
||||||
new_feed_url: Annotated[str, Form()],
|
new_feed_url: Annotated[str, Form()],
|
||||||
reader: Annotated[Reader, Depends(get_reader_dependency)],
|
|
||||||
) -> RedirectResponse:
|
) -> RedirectResponse:
|
||||||
"""Change the URL for an existing feed.
|
"""Change the URL for an existing feed.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
old_feed_url: Current feed URL.
|
old_feed_url: Current feed URL.
|
||||||
new_feed_url: New feed URL to change to.
|
new_feed_url: New feed URL to change to.
|
||||||
reader: The Reader instance.
|
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
RedirectResponse: Redirect to the feed page for the resulting URL.
|
RedirectResponse: Redirect to the feed page for the resulting URL.
|
||||||
|
|
@ -797,19 +734,6 @@ async def post_change_feed_url(
|
||||||
except ReaderError as e:
|
except ReaderError as e:
|
||||||
raise HTTPException(status_code=400, detail=f"Failed to change feed URL: {e}") from e
|
raise HTTPException(status_code=400, detail=f"Failed to change feed URL: {e}") from e
|
||||||
|
|
||||||
# Update the feed with the new URL so we can discover what entries it returns.
|
|
||||||
# Then mark all unread entries as read so the scheduler doesn't resend them.
|
|
||||||
try:
|
|
||||||
reader.update_feed(clean_new_feed_url)
|
|
||||||
except Exception:
|
|
||||||
logger.exception("Failed to update feed after URL change: %s", clean_new_feed_url)
|
|
||||||
|
|
||||||
for entry in reader.get_entries(feed=clean_new_feed_url, read=False):
|
|
||||||
try:
|
|
||||||
reader.set_entry_read(entry, True)
|
|
||||||
except Exception:
|
|
||||||
logger.exception("Failed to mark entry as read after URL change: %s", entry.id)
|
|
||||||
|
|
||||||
commit_state_change(reader, f"Change feed URL from {clean_old_feed_url} to {clean_new_feed_url}")
|
commit_state_change(reader, f"Change feed URL from {clean_old_feed_url} to {clean_new_feed_url}")
|
||||||
return RedirectResponse(url=f"/feed?feed_url={urllib.parse.quote(clean_new_feed_url)}", status_code=303)
|
return RedirectResponse(url=f"/feed?feed_url={urllib.parse.quote(clean_new_feed_url)}", status_code=303)
|
||||||
|
|
||||||
|
|
@ -817,7 +741,6 @@ async def post_change_feed_url(
|
||||||
@app.post("/reset_update_interval")
|
@app.post("/reset_update_interval")
|
||||||
async def post_reset_update_interval(
|
async def post_reset_update_interval(
|
||||||
feed_url: Annotated[str, Form()],
|
feed_url: Annotated[str, Form()],
|
||||||
reader: Annotated[Reader, Depends(get_reader_dependency)],
|
|
||||||
redirect_to: Annotated[str, Form()] = "",
|
redirect_to: Annotated[str, Form()] = "",
|
||||||
) -> RedirectResponse:
|
) -> RedirectResponse:
|
||||||
"""Reset the update interval for a feed to use the global default.
|
"""Reset the update interval for a feed to use the global default.
|
||||||
|
|
@ -825,7 +748,6 @@ async def post_reset_update_interval(
|
||||||
Args:
|
Args:
|
||||||
feed_url: The feed to change.
|
feed_url: The feed to change.
|
||||||
redirect_to: Optional redirect URL (defaults to feed page).
|
redirect_to: Optional redirect URL (defaults to feed page).
|
||||||
reader: The Reader instance.
|
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
RedirectResponse: Redirect to the specified page or feed page.
|
RedirectResponse: Redirect to the specified page or feed page.
|
||||||
|
|
@ -852,15 +774,11 @@ async def post_reset_update_interval(
|
||||||
|
|
||||||
|
|
||||||
@app.post("/set_global_update_interval")
|
@app.post("/set_global_update_interval")
|
||||||
async def post_set_global_update_interval(
|
async def post_set_global_update_interval(interval_minutes: Annotated[int, Form()]) -> RedirectResponse:
|
||||||
interval_minutes: Annotated[int, Form()],
|
|
||||||
reader: Annotated[Reader, Depends(get_reader_dependency)],
|
|
||||||
) -> RedirectResponse:
|
|
||||||
"""Set the global default update interval.
|
"""Set the global default update interval.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
interval_minutes: The update interval in minutes.
|
interval_minutes: The update interval in minutes.
|
||||||
reader: The Reader instance.
|
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
RedirectResponse: Redirect to the settings page.
|
RedirectResponse: Redirect to the settings page.
|
||||||
|
|
@ -874,15 +792,11 @@ async def post_set_global_update_interval(
|
||||||
|
|
||||||
|
|
||||||
@app.get("/add", response_class=HTMLResponse)
|
@app.get("/add", response_class=HTMLResponse)
|
||||||
def get_add(
|
def get_add(request: Request):
|
||||||
request: Request,
|
|
||||||
reader: Annotated[Reader, Depends(get_reader_dependency)],
|
|
||||||
):
|
|
||||||
"""Page for adding a new feed.
|
"""Page for adding a new feed.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
request: The request object.
|
request: The request object.
|
||||||
reader: The Reader instance.
|
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
HTMLResponse: The add feed page.
|
HTMLResponse: The add feed page.
|
||||||
|
|
@ -895,19 +809,13 @@ def get_add(
|
||||||
|
|
||||||
|
|
||||||
@app.get("/feed", response_class=HTMLResponse)
|
@app.get("/feed", response_class=HTMLResponse)
|
||||||
async def get_feed( # noqa: C901, PLR0912, PLR0914, PLR0915
|
async def get_feed(feed_url: str, request: Request, starting_after: str = ""): # noqa: C901, PLR0912, PLR0914, PLR0915
|
||||||
feed_url: str,
|
|
||||||
request: Request,
|
|
||||||
reader: Annotated[Reader, Depends(get_reader_dependency)],
|
|
||||||
starting_after: str = "",
|
|
||||||
):
|
|
||||||
"""Get a feed by URL.
|
"""Get a feed by URL.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
feed_url: The feed to add.
|
feed_url: The feed to add.
|
||||||
request: The request object.
|
request: The request object.
|
||||||
starting_after: The entry to start after. Used for pagination.
|
starting_after: The entry to start after. Used for pagination.
|
||||||
reader: The Reader instance.
|
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
HTMLResponse: The feed page.
|
HTMLResponse: The feed page.
|
||||||
|
|
@ -937,22 +845,28 @@ async def get_feed( # noqa: C901, PLR0912, PLR0914, PLR0915
|
||||||
except EntryNotFoundError as e:
|
except EntryNotFoundError as e:
|
||||||
current_entries = list(reader.get_entries(feed=clean_feed_url))
|
current_entries = list(reader.get_entries(feed=clean_feed_url))
|
||||||
msg: str = f"{e}\n\n{[entry.id for entry in current_entries]}"
|
msg: str = f"{e}\n\n{[entry.id for entry in current_entries]}"
|
||||||
html: str = create_html_for_feed(reader=reader, entries=current_entries, current_feed_url=clean_feed_url)
|
html: str = create_html_for_feed(current_entries, clean_feed_url)
|
||||||
|
|
||||||
# Get feed and global intervals for error case too
|
# Get feed and global intervals for error case too
|
||||||
feed_interval: int | None = None
|
feed_interval: int | None = None
|
||||||
feed_update_config = reader.get_tag(feed, ".reader.update", None)
|
try:
|
||||||
|
feed_update_config = reader.get_tag(feed, ".reader.update")
|
||||||
if isinstance(feed_update_config, dict) and "interval" in feed_update_config:
|
if isinstance(feed_update_config, dict) and "interval" in feed_update_config:
|
||||||
interval_value = feed_update_config["interval"]
|
interval_value = feed_update_config["interval"]
|
||||||
if isinstance(interval_value, int):
|
if isinstance(interval_value, int):
|
||||||
feed_interval = interval_value
|
feed_interval = interval_value
|
||||||
|
except TagNotFoundError:
|
||||||
|
pass
|
||||||
|
|
||||||
global_interval: int = 60
|
global_interval: int = 60
|
||||||
global_update_config = reader.get_tag((), ".reader.update", None)
|
try:
|
||||||
|
global_update_config = reader.get_tag((), ".reader.update")
|
||||||
if isinstance(global_update_config, dict) and "interval" in global_update_config:
|
if isinstance(global_update_config, dict) and "interval" in global_update_config:
|
||||||
interval_value = global_update_config["interval"]
|
interval_value = global_update_config["interval"]
|
||||||
if isinstance(interval_value, int):
|
if isinstance(interval_value, int):
|
||||||
global_interval = interval_value
|
global_interval = interval_value
|
||||||
|
except TagNotFoundError:
|
||||||
|
pass
|
||||||
|
|
||||||
context = {
|
context = {
|
||||||
"request": request,
|
"request": request,
|
||||||
|
|
@ -987,25 +901,36 @@ async def get_feed( # noqa: C901, PLR0912, PLR0914, PLR0915
|
||||||
last_entry = entries[-1]
|
last_entry = entries[-1]
|
||||||
|
|
||||||
# Create the html for the entries.
|
# Create the html for the entries.
|
||||||
html: str = create_html_for_feed(reader=reader, entries=entries, current_feed_url=clean_feed_url)
|
html: str = create_html_for_feed(entries, clean_feed_url)
|
||||||
|
|
||||||
should_send_embed: bool = bool(reader.get_tag(feed, "should_send_embed", True))
|
try:
|
||||||
|
should_send_embed: bool = bool(reader.get_tag(feed, "should_send_embed"))
|
||||||
|
except TagNotFoundError:
|
||||||
|
add_missing_tags(reader)
|
||||||
|
should_send_embed: bool = bool(reader.get_tag(feed, "should_send_embed"))
|
||||||
|
|
||||||
# Get the update interval for this feed
|
# Get the update interval for this feed
|
||||||
feed_interval: int | None = None
|
feed_interval: int | None = None
|
||||||
feed_update_config = reader.get_tag(feed, ".reader.update", None)
|
try:
|
||||||
|
feed_update_config = reader.get_tag(feed, ".reader.update")
|
||||||
if isinstance(feed_update_config, dict) and "interval" in feed_update_config:
|
if isinstance(feed_update_config, dict) and "interval" in feed_update_config:
|
||||||
interval_value = feed_update_config["interval"]
|
interval_value = feed_update_config["interval"]
|
||||||
if isinstance(interval_value, int):
|
if isinstance(interval_value, int):
|
||||||
feed_interval = interval_value
|
feed_interval = interval_value
|
||||||
|
except TagNotFoundError:
|
||||||
|
# No custom interval set for this feed, will use global default
|
||||||
|
pass
|
||||||
|
|
||||||
# Get the global default update interval
|
# Get the global default update interval
|
||||||
global_interval: int = 60 # Default to 60 minutes if not set
|
global_interval: int = 60 # Default to 60 minutes if not set
|
||||||
global_update_config = reader.get_tag((), ".reader.update", None)
|
try:
|
||||||
|
global_update_config = reader.get_tag((), ".reader.update")
|
||||||
if isinstance(global_update_config, dict) and "interval" in global_update_config:
|
if isinstance(global_update_config, dict) and "interval" in global_update_config:
|
||||||
interval_value = global_update_config["interval"]
|
interval_value = global_update_config["interval"]
|
||||||
if isinstance(interval_value, int):
|
if isinstance(interval_value, int):
|
||||||
global_interval = interval_value
|
global_interval = interval_value
|
||||||
|
except TagNotFoundError:
|
||||||
|
pass
|
||||||
|
|
||||||
context = {
|
context = {
|
||||||
"request": request,
|
"request": request,
|
||||||
|
|
@ -1023,15 +948,10 @@ async def get_feed( # noqa: C901, PLR0912, PLR0914, PLR0915
|
||||||
return templates.TemplateResponse(request=request, name="feed.html", context=context)
|
return templates.TemplateResponse(request=request, name="feed.html", context=context)
|
||||||
|
|
||||||
|
|
||||||
def create_html_for_feed( # noqa: C901, PLR0914
|
def create_html_for_feed(entries: Iterable[Entry], current_feed_url: str = "") -> str: # noqa: C901, PLR0914
|
||||||
reader: Reader,
|
|
||||||
entries: Iterable[Entry],
|
|
||||||
current_feed_url: str = "",
|
|
||||||
) -> str:
|
|
||||||
"""Create HTML for the search results.
|
"""Create HTML for the search results.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
reader: The Reader instance to use.
|
|
||||||
entries: The entries to create HTML for.
|
entries: The entries to create HTML for.
|
||||||
current_feed_url: The feed URL currently being viewed in /feed.
|
current_feed_url: The feed URL currently being viewed in /feed.
|
||||||
|
|
||||||
|
|
@ -1049,19 +969,17 @@ def create_html_for_feed( # noqa: C901, PLR0914
|
||||||
|
|
||||||
first_image = get_first_image(summary, content)
|
first_image = get_first_image(summary, content)
|
||||||
|
|
||||||
text: str = replace_tags_in_text_message(entry, reader=reader) or (
|
text: str = replace_tags_in_text_message(entry) or "<div class='text-muted'>No content available.</div>"
|
||||||
"<div class='text-muted'>No content available.</div>"
|
|
||||||
)
|
|
||||||
published = ""
|
published = ""
|
||||||
if entry.published:
|
if entry.published:
|
||||||
published: str = entry.published.strftime("%Y-%m-%d %H:%M:%S")
|
published: str = entry.published.strftime("%Y-%m-%d %H:%M:%S")
|
||||||
|
|
||||||
blacklisted: str = ""
|
blacklisted: str = ""
|
||||||
if entry_is_blacklisted(entry, reader=reader):
|
if entry_is_blacklisted(entry):
|
||||||
blacklisted = "<span class='badge bg-danger'>Blacklisted</span>"
|
blacklisted = "<span class='badge bg-danger'>Blacklisted</span>"
|
||||||
|
|
||||||
whitelisted: str = ""
|
whitelisted: str = ""
|
||||||
if entry_is_whitelisted(entry, reader=reader):
|
if entry_is_whitelisted(entry):
|
||||||
whitelisted = "<span class='badge bg-success'>Whitelisted</span>"
|
whitelisted = "<span class='badge bg-success'>Whitelisted</span>"
|
||||||
|
|
||||||
source_feed_url: str = getattr(entry, "original_feed_url", None) or entry.feed.url
|
source_feed_url: str = getattr(entry, "original_feed_url", None) or entry.feed.url
|
||||||
|
|
@ -1081,11 +999,7 @@ def create_html_for_feed( # noqa: C901, PLR0914
|
||||||
)
|
)
|
||||||
|
|
||||||
entry_id: str = urllib.parse.quote(entry.id)
|
entry_id: str = urllib.parse.quote(entry.id)
|
||||||
encoded_source_feed_url: str = urllib.parse.quote(source_feed_url)
|
to_discord_html: str = f"<a class='text-muted' href='/post_entry?entry_id={entry_id}'>Send to Discord</a>"
|
||||||
to_discord_html: str = (
|
|
||||||
f"<a class='text-muted' href='/post_entry?entry_id={entry_id}&feed_url={encoded_source_feed_url}'>"
|
|
||||||
"Send to Discord</a>"
|
|
||||||
)
|
|
||||||
|
|
||||||
# Check if this is a YouTube feed entry and the entry has a link
|
# Check if this is a YouTube feed entry and the entry has a link
|
||||||
is_youtube_feed = "youtube.com/feeds/videos.xml" in entry.feed.url
|
is_youtube_feed = "youtube.com/feeds/videos.xml" in entry.feed.url
|
||||||
|
|
@ -1156,7 +1070,6 @@ def get_data_from_hook_url(hook_name: str, hook_url: str) -> WebhookInfo:
|
||||||
hook_name (str): The webhook name.
|
hook_name (str): The webhook name.
|
||||||
hook_url (str): The webhook URL.
|
hook_url (str): The webhook URL.
|
||||||
|
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
WebhookInfo: The webhook username, avatar, guild id, etc.
|
WebhookInfo: The webhook username, avatar, guild id, etc.
|
||||||
"""
|
"""
|
||||||
|
|
@ -1178,37 +1091,39 @@ def get_data_from_hook_url(hook_name: str, hook_url: str) -> WebhookInfo:
|
||||||
|
|
||||||
|
|
||||||
@app.get("/settings", response_class=HTMLResponse)
|
@app.get("/settings", response_class=HTMLResponse)
|
||||||
async def get_settings(
|
async def get_settings(request: Request):
|
||||||
request: Request,
|
|
||||||
reader: Annotated[Reader, Depends(get_reader_dependency)],
|
|
||||||
):
|
|
||||||
"""Settings page.
|
"""Settings page.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
request: The request object.
|
request: The request object.
|
||||||
reader: The Reader instance.
|
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
HTMLResponse: The settings page.
|
HTMLResponse: The settings page.
|
||||||
"""
|
"""
|
||||||
# Get the global default update interval
|
# Get the global default update interval
|
||||||
global_interval: int = 60 # Default to 60 minutes if not set
|
global_interval: int = 60 # Default to 60 minutes if not set
|
||||||
global_update_config = reader.get_tag((), ".reader.update", None)
|
try:
|
||||||
|
global_update_config = reader.get_tag((), ".reader.update")
|
||||||
if isinstance(global_update_config, dict) and "interval" in global_update_config:
|
if isinstance(global_update_config, dict) and "interval" in global_update_config:
|
||||||
interval_value = global_update_config["interval"]
|
interval_value = global_update_config["interval"]
|
||||||
if isinstance(interval_value, int):
|
if isinstance(interval_value, int):
|
||||||
global_interval = interval_value
|
global_interval = interval_value
|
||||||
|
except TagNotFoundError:
|
||||||
|
pass
|
||||||
|
|
||||||
# Get all feeds with their intervals
|
# Get all feeds with their intervals
|
||||||
feeds: Iterable[Feed] = reader.get_feeds()
|
feeds: Iterable[Feed] = reader.get_feeds()
|
||||||
feed_intervals = []
|
feed_intervals = []
|
||||||
for feed in feeds:
|
for feed in feeds:
|
||||||
feed_interval: int | None = None
|
feed_interval: int | None = None
|
||||||
feed_update_config = reader.get_tag(feed, ".reader.update", None)
|
try:
|
||||||
|
feed_update_config = reader.get_tag(feed, ".reader.update")
|
||||||
if isinstance(feed_update_config, dict) and "interval" in feed_update_config:
|
if isinstance(feed_update_config, dict) and "interval" in feed_update_config:
|
||||||
interval_value = feed_update_config["interval"]
|
interval_value = feed_update_config["interval"]
|
||||||
if isinstance(interval_value, int):
|
if isinstance(interval_value, int):
|
||||||
feed_interval = interval_value
|
feed_interval = interval_value
|
||||||
|
except TagNotFoundError:
|
||||||
|
pass
|
||||||
|
|
||||||
feed_intervals.append({
|
feed_intervals.append({
|
||||||
"feed": feed,
|
"feed": feed,
|
||||||
|
|
@ -1226,15 +1141,11 @@ async def get_settings(
|
||||||
|
|
||||||
|
|
||||||
@app.get("/webhooks", response_class=HTMLResponse)
|
@app.get("/webhooks", response_class=HTMLResponse)
|
||||||
async def get_webhooks(
|
async def get_webhooks(request: Request):
|
||||||
request: Request,
|
|
||||||
reader: Annotated[Reader, Depends(get_reader_dependency)],
|
|
||||||
):
|
|
||||||
"""Page for adding a new webhook.
|
"""Page for adding a new webhook.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
request: The request object.
|
request: The request object.
|
||||||
reader: The Reader instance.
|
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
HTMLResponse: The add webhook page.
|
HTMLResponse: The add webhook page.
|
||||||
|
|
@ -1255,65 +1166,54 @@ async def get_webhooks(
|
||||||
|
|
||||||
|
|
||||||
@app.get("/", response_class=HTMLResponse)
|
@app.get("/", response_class=HTMLResponse)
|
||||||
def get_index(
|
def get_index(request: Request, message: str = ""):
|
||||||
request: Request,
|
|
||||||
reader: Annotated[Reader, Depends(get_reader_dependency)],
|
|
||||||
message: str = "",
|
|
||||||
):
|
|
||||||
"""This is the root of the website.
|
"""This is the root of the website.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
request: The request object.
|
request: The request object.
|
||||||
message: Optional message to display to the user.
|
message: Optional message to display to the user.
|
||||||
reader: The Reader instance.
|
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
HTMLResponse: The index page.
|
HTMLResponse: The index page.
|
||||||
"""
|
"""
|
||||||
return templates.TemplateResponse(
|
return templates.TemplateResponse(request=request, name="index.html", context=make_context_index(request, message))
|
||||||
request=request,
|
|
||||||
name="index.html",
|
|
||||||
context=make_context_index(request, message, reader),
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def make_context_index(request: Request, message: str = "", reader: Reader | None = None):
|
def make_context_index(request: Request, message: str = ""):
|
||||||
"""Create the needed context for the index page.
|
"""Create the needed context for the index page.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
request: The request object.
|
request: The request object.
|
||||||
message: Optional message to display to the user.
|
message: Optional message to display to the user.
|
||||||
reader: The Reader instance.
|
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
dict: The context for the index page.
|
dict: The context for the index page.
|
||||||
"""
|
"""
|
||||||
effective_reader: Reader = reader or get_reader_dependency()
|
hooks: list[dict[str, str]] = cast("list[dict[str, str]]", list(reader.get_tag((), "webhooks", [])))
|
||||||
hooks: list[dict[str, str]] = cast("list[dict[str, str]]", list(effective_reader.get_tag((), "webhooks", [])))
|
|
||||||
|
|
||||||
feed_list: list[dict[str, JSONType | Feed | str]] = []
|
feed_list = []
|
||||||
broken_feeds: list[Feed] = []
|
broken_feeds = []
|
||||||
feeds_without_attached_webhook: list[Feed] = []
|
feeds_without_attached_webhook = []
|
||||||
|
|
||||||
# Get all feeds and organize them
|
# Get all feeds and organize them
|
||||||
feeds: Iterable[Feed] = effective_reader.get_feeds()
|
feeds: Iterable[Feed] = reader.get_feeds()
|
||||||
for feed in feeds:
|
for feed in feeds:
|
||||||
webhook: str = str(effective_reader.get_tag(feed.url, "webhook", ""))
|
try:
|
||||||
if not webhook:
|
webhook = reader.get_tag(feed.url, "webhook")
|
||||||
|
feed_list.append({"feed": feed, "webhook": webhook, "domain": extract_domain(feed.url)})
|
||||||
|
except TagNotFoundError:
|
||||||
broken_feeds.append(feed)
|
broken_feeds.append(feed)
|
||||||
continue
|
continue
|
||||||
|
|
||||||
feed_list.append({"feed": feed, "webhook": webhook, "domain": extract_domain(feed.url)})
|
webhook_list = [hook["url"] for hook in hooks]
|
||||||
|
|
||||||
webhook_list: list[str] = [hook["url"] for hook in hooks]
|
|
||||||
if webhook not in webhook_list:
|
if webhook not in webhook_list:
|
||||||
feeds_without_attached_webhook.append(feed)
|
feeds_without_attached_webhook.append(feed)
|
||||||
|
|
||||||
return {
|
return {
|
||||||
"request": request,
|
"request": request,
|
||||||
"feeds": feed_list,
|
"feeds": feed_list,
|
||||||
"feed_count": effective_reader.get_feed_counts(),
|
"feed_count": reader.get_feed_counts(),
|
||||||
"entry_count": effective_reader.get_entry_counts(),
|
"entry_count": reader.get_entry_counts(),
|
||||||
"webhooks": hooks,
|
"webhooks": hooks,
|
||||||
"broken_feeds": broken_feeds,
|
"broken_feeds": broken_feeds,
|
||||||
"feeds_without_attached_webhook": feeds_without_attached_webhook,
|
"feeds_without_attached_webhook": feeds_without_attached_webhook,
|
||||||
|
|
@ -1322,15 +1222,12 @@ def make_context_index(request: Request, message: str = "", reader: Reader | Non
|
||||||
|
|
||||||
|
|
||||||
@app.post("/remove", response_class=HTMLResponse)
|
@app.post("/remove", response_class=HTMLResponse)
|
||||||
async def remove_feed(
|
async def remove_feed(feed_url: Annotated[str, Form()]):
|
||||||
feed_url: Annotated[str, Form()],
|
|
||||||
reader: Annotated[Reader, Depends(get_reader_dependency)],
|
|
||||||
):
|
|
||||||
"""Get a feed by URL.
|
"""Get a feed by URL.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
feed_url: The feed to add.
|
feed_url: The feed to add.
|
||||||
reader: The Reader instance.
|
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
RedirectResponse: Redirect to the index page.
|
RedirectResponse: Redirect to the index page.
|
||||||
|
|
@ -1349,17 +1246,13 @@ async def remove_feed(
|
||||||
|
|
||||||
|
|
||||||
@app.get("/update", response_class=HTMLResponse)
|
@app.get("/update", response_class=HTMLResponse)
|
||||||
async def update_feed(
|
async def update_feed(request: Request, feed_url: str):
|
||||||
request: Request,
|
|
||||||
feed_url: str,
|
|
||||||
reader: Annotated[Reader, Depends(get_reader_dependency)],
|
|
||||||
):
|
|
||||||
"""Update a feed.
|
"""Update a feed.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
request: The request object.
|
request: The request object.
|
||||||
feed_url: The feed URL to update.
|
feed_url: The feed URL to update.
|
||||||
reader: The Reader instance.
|
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
RedirectResponse: Redirect to the feed page.
|
RedirectResponse: Redirect to the feed page.
|
||||||
|
|
@ -1377,15 +1270,11 @@ async def update_feed(
|
||||||
|
|
||||||
|
|
||||||
@app.post("/backup")
|
@app.post("/backup")
|
||||||
async def manual_backup(
|
async def manual_backup(request: Request) -> RedirectResponse:
|
||||||
request: Request,
|
|
||||||
reader: Annotated[Reader, Depends(get_reader_dependency)],
|
|
||||||
) -> RedirectResponse:
|
|
||||||
"""Manually trigger a git backup of the current state.
|
"""Manually trigger a git backup of the current state.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
request: The request object.
|
request: The request object.
|
||||||
reader: The Reader instance.
|
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
RedirectResponse: Redirect to the index page with a success or error message.
|
RedirectResponse: Redirect to the index page with a success or error message.
|
||||||
|
|
@ -1408,81 +1297,51 @@ async def manual_backup(
|
||||||
|
|
||||||
|
|
||||||
@app.get("/search", response_class=HTMLResponse)
|
@app.get("/search", response_class=HTMLResponse)
|
||||||
async def search(
|
async def search(request: Request, query: str):
|
||||||
request: Request,
|
|
||||||
query: str,
|
|
||||||
reader: Annotated[Reader, Depends(get_reader_dependency)],
|
|
||||||
):
|
|
||||||
"""Get entries matching a full-text search query.
|
"""Get entries matching a full-text search query.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
query: The query to search for.
|
query: The query to search for.
|
||||||
request: The request object.
|
request: The request object.
|
||||||
reader: The Reader instance.
|
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
HTMLResponse: The search page.
|
HTMLResponse: The search page.
|
||||||
"""
|
"""
|
||||||
reader.update_search()
|
reader.update_search()
|
||||||
context = create_search_context(query, reader=reader)
|
context = create_search_context(query)
|
||||||
return templates.TemplateResponse(request=request, name="search.html", context={"request": request, **context})
|
return templates.TemplateResponse(request=request, name="search.html", context={"request": request, **context})
|
||||||
|
|
||||||
|
|
||||||
@app.get("/post_entry", response_class=HTMLResponse)
|
@app.get("/post_entry", response_class=HTMLResponse)
|
||||||
async def post_entry(
|
async def post_entry(entry_id: str):
|
||||||
entry_id: str,
|
|
||||||
reader: Annotated[Reader, Depends(get_reader_dependency)],
|
|
||||||
feed_url: str = "",
|
|
||||||
):
|
|
||||||
"""Send single entry to Discord.
|
"""Send single entry to Discord.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
entry_id: The entry to send.
|
entry_id: The entry to send.
|
||||||
feed_url: Optional feed URL used to disambiguate entries with identical IDs.
|
|
||||||
reader: The Reader instance.
|
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
RedirectResponse: Redirect to the feed page.
|
RedirectResponse: Redirect to the feed page.
|
||||||
"""
|
"""
|
||||||
unquoted_entry_id: str = urllib.parse.unquote(entry_id)
|
unquoted_entry_id: str = urllib.parse.unquote(entry_id)
|
||||||
clean_feed_url: str = urllib.parse.unquote(feed_url.strip()) if feed_url else ""
|
entry: Entry | None = next((entry for entry in reader.get_entries() if entry.id == unquoted_entry_id), None)
|
||||||
|
|
||||||
# Prefer feed-scoped lookup when feed_url is provided. This avoids ambiguity when
|
|
||||||
# multiple feeds contain entries with the same ID.
|
|
||||||
entry: Entry | None = None
|
|
||||||
if clean_feed_url:
|
|
||||||
entry = next(
|
|
||||||
(entry for entry in reader.get_entries(feed=clean_feed_url) if entry.id == unquoted_entry_id),
|
|
||||||
None,
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
entry = next((entry for entry in reader.get_entries() if entry.id == unquoted_entry_id), None)
|
|
||||||
|
|
||||||
if entry is None:
|
if entry is None:
|
||||||
return HTMLResponse(status_code=404, content=f"Entry '{entry_id}' not found.")
|
return HTMLResponse(status_code=404, content=f"Entry '{entry_id}' not found.")
|
||||||
|
|
||||||
if result := send_entry_to_discord(entry=entry, reader=reader):
|
if result := send_entry_to_discord(entry=entry):
|
||||||
return result
|
return result
|
||||||
|
|
||||||
# Redirect to the feed page.
|
# Redirect to the feed page.
|
||||||
redirect_feed_url: str = entry.feed.url.strip()
|
clean_feed_url: str = entry.feed.url.strip()
|
||||||
return RedirectResponse(url=f"/feed?feed_url={urllib.parse.quote(redirect_feed_url)}", status_code=303)
|
return RedirectResponse(url=f"/feed?feed_url={urllib.parse.quote(clean_feed_url)}", status_code=303)
|
||||||
|
|
||||||
|
|
||||||
@app.post("/modify_webhook", response_class=HTMLResponse)
|
@app.post("/modify_webhook", response_class=HTMLResponse)
|
||||||
def modify_webhook(
|
def modify_webhook(old_hook: Annotated[str, Form()], new_hook: Annotated[str, Form()]):
|
||||||
old_hook: Annotated[str, Form()],
|
|
||||||
new_hook: Annotated[str, Form()],
|
|
||||||
reader: Annotated[Reader, Depends(get_reader_dependency)],
|
|
||||||
redirect_to: Annotated[str, Form()] = "",
|
|
||||||
):
|
|
||||||
"""Modify a webhook.
|
"""Modify a webhook.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
old_hook: The webhook to modify.
|
old_hook: The webhook to modify.
|
||||||
new_hook: The new webhook.
|
new_hook: The new webhook.
|
||||||
redirect_to: Optional redirect URL after the update.
|
|
||||||
reader: The Reader instance.
|
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
RedirectResponse: Redirect to the webhook page.
|
RedirectResponse: Redirect to the webhook page.
|
||||||
|
|
@ -1497,20 +1356,15 @@ def modify_webhook(
|
||||||
# Webhooks are stored as a list of dictionaries.
|
# Webhooks are stored as a list of dictionaries.
|
||||||
# Example: [{"name": "webhook_name", "url": "webhook_url"}]
|
# Example: [{"name": "webhook_name", "url": "webhook_url"}]
|
||||||
webhooks = cast("list[dict[str, str]]", webhooks)
|
webhooks = cast("list[dict[str, str]]", webhooks)
|
||||||
old_hook_clean: str = old_hook.strip()
|
|
||||||
new_hook_clean: str = new_hook.strip()
|
|
||||||
webhook_modified: bool = False
|
|
||||||
|
|
||||||
for hook in webhooks:
|
for hook in webhooks:
|
||||||
if hook["url"] in old_hook_clean:
|
if hook["url"] in old_hook.strip():
|
||||||
hook["url"] = new_hook_clean
|
hook["url"] = new_hook.strip()
|
||||||
|
|
||||||
# Check if it has been modified.
|
# Check if it has been modified.
|
||||||
if hook["url"] != new_hook_clean:
|
if hook["url"] != new_hook.strip():
|
||||||
raise HTTPException(status_code=500, detail="Webhook could not be modified")
|
raise HTTPException(status_code=500, detail="Webhook could not be modified")
|
||||||
|
|
||||||
webhook_modified = True
|
|
||||||
|
|
||||||
# Add our new list of webhooks to the database.
|
# Add our new list of webhooks to the database.
|
||||||
reader.set_tag((), "webhooks", webhooks) # pyright: ignore[reportArgumentType]
|
reader.set_tag((), "webhooks", webhooks) # pyright: ignore[reportArgumentType]
|
||||||
|
|
||||||
|
|
@ -1518,21 +1372,16 @@ def modify_webhook(
|
||||||
# matches the old one.
|
# matches the old one.
|
||||||
feeds: Iterable[Feed] = reader.get_feeds()
|
feeds: Iterable[Feed] = reader.get_feeds()
|
||||||
for feed in feeds:
|
for feed in feeds:
|
||||||
webhook: str = str(reader.get_tag(feed, "webhook", ""))
|
try:
|
||||||
|
webhook = reader.get_tag(feed, "webhook")
|
||||||
|
except TagNotFoundError:
|
||||||
|
continue
|
||||||
|
|
||||||
if webhook == old_hook_clean:
|
if webhook == old_hook.strip():
|
||||||
reader.set_tag(feed.url, "webhook", new_hook_clean) # pyright: ignore[reportArgumentType]
|
reader.set_tag(feed.url, "webhook", new_hook.strip()) # pyright: ignore[reportArgumentType]
|
||||||
|
|
||||||
if webhook_modified and old_hook_clean != new_hook_clean:
|
# Redirect to the webhook page.
|
||||||
commit_state_change(reader, f"Modify webhook URL from {old_hook_clean} to {new_hook_clean}")
|
return RedirectResponse(url="/webhooks", status_code=303)
|
||||||
|
|
||||||
redirect_url: str = redirect_to.strip() or "/webhooks"
|
|
||||||
if redirect_to:
|
|
||||||
redirect_url = redirect_url.replace(urllib.parse.quote(old_hook_clean), urllib.parse.quote(new_hook_clean))
|
|
||||||
redirect_url = redirect_url.replace(old_hook_clean, new_hook_clean)
|
|
||||||
|
|
||||||
# Redirect to the requested page.
|
|
||||||
return RedirectResponse(url=redirect_url, status_code=303)
|
|
||||||
|
|
||||||
|
|
||||||
def extract_youtube_video_id(url: str) -> str | None:
|
def extract_youtube_video_id(url: str) -> str | None:
|
||||||
|
|
@ -1558,216 +1407,11 @@ def extract_youtube_video_id(url: str) -> str | None:
|
||||||
return None
|
return None
|
||||||
|
|
||||||
|
|
||||||
def resolve_final_feed_url(url: str) -> tuple[str, str | None]:
|
|
||||||
"""Resolve a feed URL by following redirects.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
url: The feed URL to resolve.
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
tuple[str, str | None]: A tuple with (resolved_url, error_message).
|
|
||||||
error_message is None when resolution succeeded.
|
|
||||||
"""
|
|
||||||
clean_url: str = url.strip()
|
|
||||||
if not clean_url:
|
|
||||||
return "", "URL is empty"
|
|
||||||
|
|
||||||
if not is_url_valid(clean_url):
|
|
||||||
return clean_url, "URL is invalid"
|
|
||||||
|
|
||||||
try:
|
|
||||||
response: Response = httpx.get(clean_url, follow_redirects=True, timeout=10.0)
|
|
||||||
except httpx.HTTPError as e:
|
|
||||||
return clean_url, str(e)
|
|
||||||
|
|
||||||
if not response.is_success:
|
|
||||||
return clean_url, f"HTTP {response.status_code}"
|
|
||||||
|
|
||||||
return str(response.url), None
|
|
||||||
|
|
||||||
|
|
||||||
def create_webhook_feed_url_preview(
|
|
||||||
webhook_feeds: list[Feed],
|
|
||||||
replace_from: str,
|
|
||||||
replace_to: str,
|
|
||||||
resolve_urls: bool, # noqa: FBT001
|
|
||||||
force_update: bool = False, # noqa: FBT001, FBT002
|
|
||||||
existing_feed_urls: set[str] | None = None,
|
|
||||||
) -> list[dict[str, str | bool | None]]:
|
|
||||||
"""Create preview rows for bulk feed URL replacement.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
webhook_feeds: Feeds attached to a webhook.
|
|
||||||
replace_from: Text to replace in each URL.
|
|
||||||
replace_to: Replacement text.
|
|
||||||
resolve_urls: Whether to resolve resulting URLs via HTTP redirects.
|
|
||||||
force_update: Whether conflicts should be marked as force-overwritable.
|
|
||||||
existing_feed_urls: Optional set of all tracked feed URLs used for conflict detection.
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
list[dict[str, str | bool | None]]: Rows used in the preview table.
|
|
||||||
"""
|
|
||||||
known_feed_urls: set[str] = existing_feed_urls or {feed.url for feed in webhook_feeds}
|
|
||||||
preview_rows: list[dict[str, str | bool | None]] = []
|
|
||||||
for feed in webhook_feeds:
|
|
||||||
old_url: str = feed.url
|
|
||||||
has_match: bool = bool(replace_from and replace_from in old_url)
|
|
||||||
|
|
||||||
candidate_url: str = old_url
|
|
||||||
if has_match:
|
|
||||||
candidate_url = old_url.replace(replace_from, replace_to)
|
|
||||||
|
|
||||||
resolved_url: str = candidate_url
|
|
||||||
resolution_error: str | None = None
|
|
||||||
if has_match and candidate_url != old_url and resolve_urls:
|
|
||||||
resolved_url, resolution_error = resolve_final_feed_url(candidate_url)
|
|
||||||
|
|
||||||
will_force_ignore_errors: bool = bool(
|
|
||||||
force_update and bool(resolution_error) and has_match and old_url != candidate_url,
|
|
||||||
)
|
|
||||||
|
|
||||||
target_exists: bool = bool(
|
|
||||||
has_match and not resolution_error and resolved_url != old_url and resolved_url in known_feed_urls,
|
|
||||||
)
|
|
||||||
will_force_overwrite: bool = bool(target_exists and force_update)
|
|
||||||
will_change: bool = bool(
|
|
||||||
has_match
|
|
||||||
and old_url != (candidate_url if will_force_ignore_errors else resolved_url)
|
|
||||||
and (not target_exists or will_force_overwrite)
|
|
||||||
and (not resolution_error or will_force_ignore_errors),
|
|
||||||
)
|
|
||||||
|
|
||||||
preview_rows.append({
|
|
||||||
"old_url": old_url,
|
|
||||||
"candidate_url": candidate_url,
|
|
||||||
"resolved_url": resolved_url,
|
|
||||||
"has_match": has_match,
|
|
||||||
"will_change": will_change,
|
|
||||||
"target_exists": target_exists,
|
|
||||||
"will_force_overwrite": will_force_overwrite,
|
|
||||||
"will_force_ignore_errors": will_force_ignore_errors,
|
|
||||||
"resolution_error": resolution_error,
|
|
||||||
})
|
|
||||||
|
|
||||||
return preview_rows
|
|
||||||
|
|
||||||
|
|
||||||
def build_webhook_mass_update_context(
|
|
||||||
webhook_feeds: list[Feed],
|
|
||||||
all_feeds: list[Feed],
|
|
||||||
replace_from: str,
|
|
||||||
replace_to: str,
|
|
||||||
resolve_urls: bool, # noqa: FBT001
|
|
||||||
force_update: bool = False, # noqa: FBT001, FBT002
|
|
||||||
) -> dict[str, str | bool | int | list[dict[str, str | bool | None]] | dict[str, int]]:
|
|
||||||
"""Build context data used by the webhook mass URL update preview UI.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
webhook_feeds: Feeds attached to the selected webhook.
|
|
||||||
all_feeds: All tracked feeds.
|
|
||||||
replace_from: Text to replace in URLs.
|
|
||||||
replace_to: Replacement text.
|
|
||||||
resolve_urls: Whether to resolve resulting URLs.
|
|
||||||
force_update: Whether to allow overwriting existing target URLs.
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
dict[str, ...]: Context values for rendering preview controls and table.
|
|
||||||
"""
|
|
||||||
clean_replace_from: str = replace_from.strip()
|
|
||||||
clean_replace_to: str = replace_to.strip()
|
|
||||||
|
|
||||||
preview_rows: list[dict[str, str | bool | None]] = []
|
|
||||||
if clean_replace_from:
|
|
||||||
preview_rows = create_webhook_feed_url_preview(
|
|
||||||
webhook_feeds=webhook_feeds,
|
|
||||||
replace_from=clean_replace_from,
|
|
||||||
replace_to=clean_replace_to,
|
|
||||||
resolve_urls=resolve_urls,
|
|
||||||
force_update=force_update,
|
|
||||||
existing_feed_urls={feed.url for feed in all_feeds},
|
|
||||||
)
|
|
||||||
|
|
||||||
preview_summary: dict[str, int] = {
|
|
||||||
"total": len(preview_rows),
|
|
||||||
"matched": sum(1 for row in preview_rows if row["has_match"]),
|
|
||||||
"will_update": sum(1 for row in preview_rows if row["will_change"]),
|
|
||||||
"conflicts": sum(1 for row in preview_rows if row["target_exists"] and not row["will_force_overwrite"]),
|
|
||||||
"force_overwrite": sum(1 for row in preview_rows if row["will_force_overwrite"]),
|
|
||||||
"force_ignore_errors": sum(1 for row in preview_rows if row["will_force_ignore_errors"]),
|
|
||||||
"resolve_errors": sum(1 for row in preview_rows if row["resolution_error"]),
|
|
||||||
}
|
|
||||||
preview_summary["no_match"] = preview_summary["total"] - preview_summary["matched"]
|
|
||||||
preview_summary["no_change"] = sum(
|
|
||||||
1 for row in preview_rows if row["has_match"] and not row["resolution_error"] and not row["will_change"]
|
|
||||||
)
|
|
||||||
|
|
||||||
return {
|
|
||||||
"replace_from": clean_replace_from,
|
|
||||||
"replace_to": clean_replace_to,
|
|
||||||
"resolve_urls": resolve_urls,
|
|
||||||
"force_update": force_update,
|
|
||||||
"preview_rows": preview_rows,
|
|
||||||
"preview_summary": preview_summary,
|
|
||||||
"preview_change_count": preview_summary["will_update"],
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
@app.get("/webhook_entries_mass_update_preview", response_class=HTMLResponse)
|
|
||||||
async def get_webhook_entries_mass_update_preview(
|
|
||||||
webhook_url: str,
|
|
||||||
request: Request,
|
|
||||||
reader: Annotated[Reader, Depends(get_reader_dependency)],
|
|
||||||
replace_from: str = "",
|
|
||||||
replace_to: str = "",
|
|
||||||
resolve_urls: bool = True, # noqa: FBT001, FBT002
|
|
||||||
force_update: bool = False, # noqa: FBT001, FBT002
|
|
||||||
) -> HTMLResponse:
|
|
||||||
"""Render the mass-update preview fragment for a webhook using HTMX.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
webhook_url: Webhook URL whose feeds are being updated.
|
|
||||||
request: The request object.
|
|
||||||
reader: The Reader instance.
|
|
||||||
replace_from: Text to find in URLs.
|
|
||||||
replace_to: Replacement text.
|
|
||||||
resolve_urls: Whether to resolve resulting URLs.
|
|
||||||
force_update: Whether to allow overwriting existing target URLs.
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
HTMLResponse: Rendered partial template containing summary + preview table.
|
|
||||||
"""
|
|
||||||
clean_webhook_url: str = urllib.parse.unquote(webhook_url.strip())
|
|
||||||
all_feeds: list[Feed] = list(reader.get_feeds())
|
|
||||||
webhook_feeds: list[Feed] = [
|
|
||||||
feed for feed in all_feeds if str(reader.get_tag(feed.url, "webhook", "")) == clean_webhook_url
|
|
||||||
]
|
|
||||||
|
|
||||||
context = {
|
|
||||||
"request": request,
|
|
||||||
"webhook_url": clean_webhook_url,
|
|
||||||
**build_webhook_mass_update_context(
|
|
||||||
webhook_feeds=webhook_feeds,
|
|
||||||
all_feeds=all_feeds,
|
|
||||||
replace_from=replace_from,
|
|
||||||
replace_to=replace_to,
|
|
||||||
resolve_urls=resolve_urls,
|
|
||||||
force_update=force_update,
|
|
||||||
),
|
|
||||||
}
|
|
||||||
return templates.TemplateResponse(request=request, name="_webhook_mass_update_preview.html", context=context)
|
|
||||||
|
|
||||||
|
|
||||||
@app.get("/webhook_entries", response_class=HTMLResponse)
|
@app.get("/webhook_entries", response_class=HTMLResponse)
|
||||||
async def get_webhook_entries( # noqa: C901, PLR0914
|
async def get_webhook_entries( # noqa: C901, PLR0912, PLR0914
|
||||||
webhook_url: str,
|
webhook_url: str,
|
||||||
request: Request,
|
request: Request,
|
||||||
reader: Annotated[Reader, Depends(get_reader_dependency)],
|
|
||||||
starting_after: str = "",
|
starting_after: str = "",
|
||||||
replace_from: str = "",
|
|
||||||
replace_to: str = "",
|
|
||||||
resolve_urls: bool = True, # noqa: FBT001, FBT002
|
|
||||||
force_update: bool = False, # noqa: FBT001, FBT002
|
|
||||||
message: str = "",
|
|
||||||
) -> HTMLResponse:
|
) -> HTMLResponse:
|
||||||
"""Get all latest entries from all feeds for a specific webhook.
|
"""Get all latest entries from all feeds for a specific webhook.
|
||||||
|
|
||||||
|
|
@ -1775,12 +1419,6 @@ async def get_webhook_entries( # noqa: C901, PLR0914
|
||||||
webhook_url: The webhook URL to get entries for.
|
webhook_url: The webhook URL to get entries for.
|
||||||
request: The request object.
|
request: The request object.
|
||||||
starting_after: The entry to start after. Used for pagination.
|
starting_after: The entry to start after. Used for pagination.
|
||||||
replace_from: Optional URL substring to find for bulk URL replacement preview.
|
|
||||||
replace_to: Optional replacement substring used in bulk URL replacement preview.
|
|
||||||
resolve_urls: Whether to resolve replaced URLs by following redirects.
|
|
||||||
force_update: Whether to allow overwriting existing target URLs during apply.
|
|
||||||
message: Optional status message shown in the UI.
|
|
||||||
reader: The Reader instance.
|
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
HTMLResponse: The webhook entries page.
|
HTMLResponse: The webhook entries page.
|
||||||
|
|
@ -1802,26 +1440,24 @@ async def get_webhook_entries( # noqa: C901, PLR0914
|
||||||
if not webhook_name:
|
if not webhook_name:
|
||||||
raise HTTPException(status_code=404, detail=f"Webhook not found: {clean_webhook_url}")
|
raise HTTPException(status_code=404, detail=f"Webhook not found: {clean_webhook_url}")
|
||||||
|
|
||||||
hook_info: WebhookInfo = get_data_from_hook_url(hook_name=webhook_name, hook_url=clean_webhook_url)
|
|
||||||
|
|
||||||
# Get all feeds associated with this webhook
|
# Get all feeds associated with this webhook
|
||||||
all_feeds: list[Feed] = list(reader.get_feeds())
|
all_feeds: list[Feed] = list(reader.get_feeds())
|
||||||
webhook_feeds: list[Feed] = []
|
webhook_feeds: list[Feed] = []
|
||||||
|
|
||||||
for feed in all_feeds:
|
for feed in all_feeds:
|
||||||
|
try:
|
||||||
feed_webhook: str = str(reader.get_tag(feed.url, "webhook", ""))
|
feed_webhook: str = str(reader.get_tag(feed.url, "webhook", ""))
|
||||||
if feed_webhook == clean_webhook_url:
|
if feed_webhook == clean_webhook_url:
|
||||||
webhook_feeds.append(feed)
|
webhook_feeds.append(feed)
|
||||||
|
except TagNotFoundError:
|
||||||
|
continue
|
||||||
|
|
||||||
# Get all entries from all feeds for this webhook, sorted by published date
|
# Get all entries from all feeds for this webhook, sorted by published date
|
||||||
all_entries: list[Entry] = [entry for feed in webhook_feeds for entry in reader.get_entries(feed=feed)]
|
all_entries: list[Entry] = [entry for feed in webhook_feeds for entry in reader.get_entries(feed=feed)]
|
||||||
|
|
||||||
# Sort entries by published date (newest first), with undated entries last.
|
# Sort entries by published date (newest first)
|
||||||
all_entries.sort(
|
all_entries.sort(
|
||||||
key=lambda e: (
|
key=lambda e: e.published or datetime.now(tz=UTC),
|
||||||
e.published is not None,
|
|
||||||
e.published or datetime.min.replace(tzinfo=UTC),
|
|
||||||
),
|
|
||||||
reverse=True,
|
reverse=True,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
@ -1854,16 +1490,7 @@ async def get_webhook_entries( # noqa: C901, PLR0914
|
||||||
last_entry = paginated_entries[-1]
|
last_entry = paginated_entries[-1]
|
||||||
|
|
||||||
# Create the html for the entries
|
# Create the html for the entries
|
||||||
html: str = create_html_for_feed(reader=reader, entries=paginated_entries)
|
html: str = create_html_for_feed(paginated_entries)
|
||||||
|
|
||||||
mass_update_context = build_webhook_mass_update_context(
|
|
||||||
webhook_feeds=webhook_feeds,
|
|
||||||
all_feeds=all_feeds,
|
|
||||||
replace_from=replace_from,
|
|
||||||
replace_to=replace_to,
|
|
||||||
resolve_urls=resolve_urls,
|
|
||||||
force_update=force_update,
|
|
||||||
)
|
|
||||||
|
|
||||||
# Check if there are more entries available
|
# Check if there are more entries available
|
||||||
total_entries: int = len(all_entries)
|
total_entries: int = len(all_entries)
|
||||||
|
|
@ -1871,155 +1498,18 @@ async def get_webhook_entries( # noqa: C901, PLR0914
|
||||||
|
|
||||||
context = {
|
context = {
|
||||||
"request": request,
|
"request": request,
|
||||||
"hook_info": hook_info,
|
|
||||||
"webhook_name": webhook_name,
|
"webhook_name": webhook_name,
|
||||||
"webhook_url": clean_webhook_url,
|
"webhook_url": clean_webhook_url,
|
||||||
"webhook_feeds": webhook_feeds,
|
|
||||||
"entries": paginated_entries,
|
"entries": paginated_entries,
|
||||||
"html": html,
|
"html": html,
|
||||||
"last_entry": last_entry,
|
"last_entry": last_entry,
|
||||||
"is_show_more_entries_button_visible": is_show_more_entries_button_visible,
|
"is_show_more_entries_button_visible": is_show_more_entries_button_visible,
|
||||||
"total_entries": total_entries,
|
"total_entries": total_entries,
|
||||||
"feeds_count": len(webhook_feeds),
|
"feeds_count": len(webhook_feeds),
|
||||||
"message": urllib.parse.unquote(message) if message else "",
|
|
||||||
**mass_update_context,
|
|
||||||
}
|
}
|
||||||
return templates.TemplateResponse(request=request, name="webhook_entries.html", context=context)
|
return templates.TemplateResponse(request=request, name="webhook_entries.html", context=context)
|
||||||
|
|
||||||
|
|
||||||
@app.post("/bulk_change_feed_urls", response_class=HTMLResponse)
|
|
||||||
async def post_bulk_change_feed_urls( # noqa: C901, PLR0914, PLR0912, PLR0915
|
|
||||||
webhook_url: Annotated[str, Form()],
|
|
||||||
replace_from: Annotated[str, Form()],
|
|
||||||
reader: Annotated[Reader, Depends(get_reader_dependency)],
|
|
||||||
replace_to: Annotated[str, Form()] = "",
|
|
||||||
resolve_urls: Annotated[bool, Form()] = True, # noqa: FBT002
|
|
||||||
force_update: Annotated[bool, Form()] = False, # noqa: FBT002
|
|
||||||
) -> RedirectResponse:
|
|
||||||
"""Bulk-change feed URLs attached to a webhook.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
webhook_url: The webhook URL whose feeds should be updated.
|
|
||||||
replace_from: Text to find in each URL.
|
|
||||||
replace_to: Text to replace with.
|
|
||||||
resolve_urls: Whether to resolve resulting URLs via redirects.
|
|
||||||
force_update: Whether existing target feed URLs should be overwritten.
|
|
||||||
reader: The Reader instance.
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
RedirectResponse: Redirect to webhook detail with status message.
|
|
||||||
|
|
||||||
Raises:
|
|
||||||
HTTPException: If webhook is missing or replace_from is empty.
|
|
||||||
"""
|
|
||||||
clean_webhook_url: str = urllib.parse.unquote(webhook_url.strip())
|
|
||||||
clean_replace_from: str = replace_from.strip()
|
|
||||||
clean_replace_to: str = replace_to.strip()
|
|
||||||
|
|
||||||
if not clean_replace_from:
|
|
||||||
raise HTTPException(status_code=400, detail="replace_from cannot be empty")
|
|
||||||
|
|
||||||
webhooks: list[dict[str, str]] = cast("list[dict[str, str]]", list(reader.get_tag((), "webhooks", [])))
|
|
||||||
if not any(hook["url"] == clean_webhook_url for hook in webhooks):
|
|
||||||
raise HTTPException(status_code=404, detail=f"Webhook not found: {clean_webhook_url}")
|
|
||||||
|
|
||||||
all_feeds: list[Feed] = list(reader.get_feeds())
|
|
||||||
webhook_feeds: list[Feed] = []
|
|
||||||
for feed in all_feeds:
|
|
||||||
feed_webhook: str = str(reader.get_tag(feed.url, "webhook", ""))
|
|
||||||
if feed_webhook == clean_webhook_url:
|
|
||||||
webhook_feeds.append(feed)
|
|
||||||
|
|
||||||
preview_rows: list[dict[str, str | bool | None]] = create_webhook_feed_url_preview(
|
|
||||||
webhook_feeds=webhook_feeds,
|
|
||||||
replace_from=clean_replace_from,
|
|
||||||
replace_to=clean_replace_to,
|
|
||||||
resolve_urls=resolve_urls,
|
|
||||||
force_update=force_update,
|
|
||||||
existing_feed_urls={feed.url for feed in all_feeds},
|
|
||||||
)
|
|
||||||
|
|
||||||
changed_count: int = 0
|
|
||||||
skipped_count: int = 0
|
|
||||||
failed_count: int = 0
|
|
||||||
conflict_count: int = 0
|
|
||||||
force_overwrite_count: int = 0
|
|
||||||
|
|
||||||
for row in preview_rows:
|
|
||||||
if not row["has_match"]:
|
|
||||||
continue
|
|
||||||
|
|
||||||
if row["resolution_error"] and not force_update:
|
|
||||||
skipped_count += 1
|
|
||||||
continue
|
|
||||||
|
|
||||||
if row["target_exists"] and not force_update:
|
|
||||||
conflict_count += 1
|
|
||||||
skipped_count += 1
|
|
||||||
continue
|
|
||||||
|
|
||||||
old_url: str = str(row["old_url"])
|
|
||||||
new_url: str = str(row["candidate_url"] if row["will_force_ignore_errors"] else row["resolved_url"])
|
|
||||||
|
|
||||||
if old_url == new_url:
|
|
||||||
skipped_count += 1
|
|
||||||
continue
|
|
||||||
|
|
||||||
if row["target_exists"] and force_update:
|
|
||||||
try:
|
|
||||||
reader.delete_feed(new_url)
|
|
||||||
force_overwrite_count += 1
|
|
||||||
except FeedNotFoundError:
|
|
||||||
pass
|
|
||||||
except ReaderError:
|
|
||||||
failed_count += 1
|
|
||||||
continue
|
|
||||||
|
|
||||||
try:
|
|
||||||
reader.change_feed_url(old_url, new_url)
|
|
||||||
except FeedExistsError:
|
|
||||||
skipped_count += 1
|
|
||||||
continue
|
|
||||||
except FeedNotFoundError:
|
|
||||||
skipped_count += 1
|
|
||||||
continue
|
|
||||||
except ReaderError:
|
|
||||||
failed_count += 1
|
|
||||||
continue
|
|
||||||
|
|
||||||
try:
|
|
||||||
reader.update_feed(new_url)
|
|
||||||
except Exception:
|
|
||||||
logger.exception("Failed to update feed after URL change: %s", new_url)
|
|
||||||
|
|
||||||
for entry in reader.get_entries(feed=new_url, read=False):
|
|
||||||
try:
|
|
||||||
reader.set_entry_read(entry, True)
|
|
||||||
except Exception:
|
|
||||||
logger.exception("Failed to mark entry as read after URL change: %s", entry.id)
|
|
||||||
|
|
||||||
changed_count += 1
|
|
||||||
|
|
||||||
if changed_count > 0:
|
|
||||||
commit_state_change(
|
|
||||||
reader,
|
|
||||||
f"Bulk change {changed_count} feed URL(s) for webhook {clean_webhook_url}",
|
|
||||||
)
|
|
||||||
|
|
||||||
status_message: str = (
|
|
||||||
f"Updated {changed_count} feed URL(s). "
|
|
||||||
f"Force overwrote {force_overwrite_count}. "
|
|
||||||
f"Conflicts {conflict_count}. "
|
|
||||||
f"Skipped {skipped_count}. "
|
|
||||||
f"Failed {failed_count}."
|
|
||||||
)
|
|
||||||
redirect_url: str = (
|
|
||||||
f"/webhook_entries?webhook_url={urllib.parse.quote(clean_webhook_url)}"
|
|
||||||
f"&message={urllib.parse.quote(status_message)}"
|
|
||||||
)
|
|
||||||
return RedirectResponse(url=redirect_url, status_code=303)
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
sentry_sdk.init(
|
sentry_sdk.init(
|
||||||
dsn="https://6e77a0d7acb9c7ea22e85a375e0ff1f4@o4505228040339456.ingest.us.sentry.io/4508792887967744",
|
dsn="https://6e77a0d7acb9c7ea22e85a375e0ff1f4@o4505228040339456.ingest.us.sentry.io/4508792887967744",
|
||||||
|
|
|
||||||
109
discord_rss_bot/missing_tags.py
Normal file
109
discord_rss_bot/missing_tags.py
Normal file
|
|
@ -0,0 +1,109 @@
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from reader import Feed
|
||||||
|
from reader import Reader
|
||||||
|
from reader import TagNotFoundError
|
||||||
|
|
||||||
|
from discord_rss_bot.settings import default_custom_embed
|
||||||
|
from discord_rss_bot.settings import default_custom_message
|
||||||
|
|
||||||
|
|
||||||
|
def add_custom_message(reader: Reader, feed: Feed) -> None:
|
||||||
|
"""Add the custom message tag to the feed if it doesn't exist.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
reader: What Reader to use.
|
||||||
|
feed: The feed to add the tag to.
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
reader.get_tag(feed, "custom_message")
|
||||||
|
except TagNotFoundError:
|
||||||
|
reader.set_tag(feed.url, "custom_message", default_custom_message) # pyright: ignore[reportArgumentType]
|
||||||
|
reader.set_tag(feed.url, "has_custom_message", True) # pyright: ignore[reportArgumentType]
|
||||||
|
|
||||||
|
|
||||||
|
def add_has_custom_message(reader: Reader, feed: Feed) -> None:
|
||||||
|
"""Add the has_custom_message tag to the feed if it doesn't exist.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
reader: What Reader to use.
|
||||||
|
feed: The feed to add the tag to.
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
reader.get_tag(feed, "has_custom_message")
|
||||||
|
except TagNotFoundError:
|
||||||
|
if reader.get_tag(feed, "custom_message") == default_custom_message:
|
||||||
|
reader.set_tag(feed.url, "has_custom_message", False) # pyright: ignore[reportArgumentType]
|
||||||
|
else:
|
||||||
|
reader.set_tag(feed.url, "has_custom_message", True) # pyright: ignore[reportArgumentType]
|
||||||
|
|
||||||
|
|
||||||
|
def add_if_embed(reader: Reader, feed: Feed) -> None:
|
||||||
|
"""Add the if_embed tag to the feed if it doesn't exist.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
reader: What Reader to use.
|
||||||
|
feed: The feed to add the tag to.
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
reader.get_tag(feed, "if_embed")
|
||||||
|
except TagNotFoundError:
|
||||||
|
reader.set_tag(feed.url, "if_embed", True) # pyright: ignore[reportArgumentType]
|
||||||
|
|
||||||
|
|
||||||
|
def add_custom_embed(reader: Reader, feed: Feed) -> None:
|
||||||
|
"""Add the custom embed tag to the feed if it doesn't exist.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
reader: What Reader to use.
|
||||||
|
feed: The feed to add the tag to.
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
reader.get_tag(feed, "embed")
|
||||||
|
except TagNotFoundError:
|
||||||
|
reader.set_tag(feed.url, "embed", default_custom_embed) # pyright: ignore[reportArgumentType]
|
||||||
|
reader.set_tag(feed.url, "has_custom_embed", True) # pyright: ignore[reportArgumentType]
|
||||||
|
|
||||||
|
|
||||||
|
def add_has_custom_embed(reader: Reader, feed: Feed) -> None:
|
||||||
|
"""Add the has_custom_embed tag to the feed if it doesn't exist.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
reader: What Reader to use.
|
||||||
|
feed: The feed to add the tag to.
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
reader.get_tag(feed, "has_custom_embed")
|
||||||
|
except TagNotFoundError:
|
||||||
|
if reader.get_tag(feed, "embed") == default_custom_embed:
|
||||||
|
reader.set_tag(feed.url, "has_custom_embed", False) # pyright: ignore[reportArgumentType]
|
||||||
|
else:
|
||||||
|
reader.set_tag(feed.url, "has_custom_embed", True) # pyright: ignore[reportArgumentType]
|
||||||
|
|
||||||
|
|
||||||
|
def add_should_send_embed(reader: Reader, feed: Feed) -> None:
|
||||||
|
"""Add the should_send_embed tag to the feed if it doesn't exist.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
reader: What Reader to use.
|
||||||
|
feed: The feed to add the tag to.
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
reader.get_tag(feed, "should_send_embed")
|
||||||
|
except TagNotFoundError:
|
||||||
|
reader.set_tag(feed.url, "should_send_embed", True) # pyright: ignore[reportArgumentType]
|
||||||
|
|
||||||
|
|
||||||
|
def add_missing_tags(reader: Reader) -> None:
|
||||||
|
"""Add missing tags to feeds.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
reader: What Reader to use.
|
||||||
|
"""
|
||||||
|
for feed in reader.get_feeds():
|
||||||
|
add_custom_message(reader, feed)
|
||||||
|
add_has_custom_message(reader, feed)
|
||||||
|
add_if_embed(reader, feed)
|
||||||
|
add_custom_embed(reader, feed)
|
||||||
|
add_has_custom_embed(reader, feed)
|
||||||
|
add_should_send_embed(reader, feed)
|
||||||
|
|
@ -3,6 +3,8 @@ from __future__ import annotations
|
||||||
import urllib.parse
|
import urllib.parse
|
||||||
from typing import TYPE_CHECKING
|
from typing import TYPE_CHECKING
|
||||||
|
|
||||||
|
from discord_rss_bot.settings import get_reader
|
||||||
|
|
||||||
if TYPE_CHECKING:
|
if TYPE_CHECKING:
|
||||||
from collections.abc import Iterable
|
from collections.abc import Iterable
|
||||||
|
|
||||||
|
|
@ -12,16 +14,19 @@ if TYPE_CHECKING:
|
||||||
from reader import Reader
|
from reader import Reader
|
||||||
|
|
||||||
|
|
||||||
def create_search_context(query: str, reader: Reader) -> dict:
|
def create_search_context(query: str, custom_reader: Reader | None = None) -> dict:
|
||||||
"""Build context for search.html template.
|
"""Build context for search.html template.
|
||||||
|
|
||||||
|
If custom_reader is None, use the default reader from settings.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
query (str): The search query.
|
query (str): The search query.
|
||||||
reader (Reader): Custom Reader instance.
|
custom_reader (Reader | None): Optional custom Reader instance.
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
dict: Context dictionary for rendering the search results.
|
dict: Context dictionary for rendering the search results.
|
||||||
"""
|
"""
|
||||||
|
reader: Reader = get_reader() if custom_reader is None else custom_reader
|
||||||
search_results: Iterable[EntrySearchResult] = reader.search_entries(query)
|
search_results: Iterable[EntrySearchResult] = reader.search_entries(query)
|
||||||
|
|
||||||
results: list[dict] = []
|
results: list[dict] = []
|
||||||
|
|
|
||||||
|
|
@ -7,6 +7,7 @@ from pathlib import Path
|
||||||
|
|
||||||
from platformdirs import user_data_dir
|
from platformdirs import user_data_dir
|
||||||
from reader import Reader
|
from reader import Reader
|
||||||
|
from reader import TagNotFoundError
|
||||||
from reader import make_reader
|
from reader import make_reader
|
||||||
|
|
||||||
if typing.TYPE_CHECKING:
|
if typing.TYPE_CHECKING:
|
||||||
|
|
@ -47,7 +48,9 @@ def get_reader(custom_location: Path | None = None) -> Reader:
|
||||||
# https://reader.readthedocs.io/en/latest/api.html#reader.types.UpdateConfig
|
# https://reader.readthedocs.io/en/latest/api.html#reader.types.UpdateConfig
|
||||||
# Set the default update interval to 15 minutes if not already configured
|
# Set the default update interval to 15 minutes if not already configured
|
||||||
# Users can change this via the Settings page or per-feed in the feed page
|
# Users can change this via the Settings page or per-feed in the feed page
|
||||||
if reader.get_tag((), ".reader.update", None) is None:
|
try:
|
||||||
|
reader.get_tag((), ".reader.update")
|
||||||
|
except TagNotFoundError:
|
||||||
# Set default
|
# Set default
|
||||||
reader.set_tag((), ".reader.update", {"interval": 15})
|
reader.set_tag((), ".reader.update", {"interval": 15})
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -1,73 +0,0 @@
|
||||||
{% if preview_rows %}
|
|
||||||
<p class="small text-muted mb-1">
|
|
||||||
{{ preview_change_count }} feed URL{{ 's' if preview_change_count != 1 else '' }} ready to update.
|
|
||||||
</p>
|
|
||||||
<div class="small text-muted mb-2 d-flex flex-wrap gap-2">
|
|
||||||
<span class="badge bg-secondary">Total: {{ preview_summary.total }}</span>
|
|
||||||
<span class="badge bg-info text-dark">Matched: {{ preview_summary.matched }}</span>
|
|
||||||
<span class="badge bg-success">Will update: {{ preview_summary.will_update }}</span>
|
|
||||||
<span class="badge bg-warning text-dark">Conflicts: {{ preview_summary.conflicts }}</span>
|
|
||||||
<span class="badge bg-warning">Force overwrite: {{ preview_summary.force_overwrite }}</span>
|
|
||||||
<span class="badge bg-warning text-dark">Force ignore errors: {{ preview_summary.force_ignore_errors }}</span>
|
|
||||||
<span class="badge bg-danger">Resolve errors: {{ preview_summary.resolve_errors }}</span>
|
|
||||||
<span class="badge bg-secondary">No change: {{ preview_summary.no_change }}</span>
|
|
||||||
<span class="badge bg-secondary">No match: {{ preview_summary.no_match }}</span>
|
|
||||||
</div>
|
|
||||||
<form action="/bulk_change_feed_urls" method="post" class="mb-2">
|
|
||||||
<input type="hidden" name="webhook_url" value="{{ webhook_url }}" />
|
|
||||||
<input type="hidden" name="replace_from" value="{{ replace_from }}" />
|
|
||||||
<input type="hidden" name="replace_to" value="{{ replace_to }}" />
|
|
||||||
<input type="hidden"
|
|
||||||
name="resolve_urls"
|
|
||||||
value="{{ 'true' if resolve_urls else 'false' }}" />
|
|
||||||
<input type="hidden"
|
|
||||||
name="force_update"
|
|
||||||
value="{{ 'true' if force_update else 'false' }}" />
|
|
||||||
<button type="submit"
|
|
||||||
class="btn btn-warning w-100"
|
|
||||||
{% if preview_change_count == 0 %}disabled{% endif %}
|
|
||||||
onclick="return confirm('Apply these feed URL updates?');">Apply mass update</button>
|
|
||||||
</form>
|
|
||||||
<div class="table-responsive mt-2">
|
|
||||||
<table class="table table-sm table-dark table-striped align-middle mb-0">
|
|
||||||
<thead>
|
|
||||||
<tr>
|
|
||||||
<th scope="col">Old URL</th>
|
|
||||||
<th scope="col">New URL</th>
|
|
||||||
<th scope="col">Status</th>
|
|
||||||
</tr>
|
|
||||||
</thead>
|
|
||||||
<tbody>
|
|
||||||
{% for row in preview_rows %}
|
|
||||||
<tr>
|
|
||||||
<td>
|
|
||||||
<code>{{ row.old_url }}</code>
|
|
||||||
</td>
|
|
||||||
<td>
|
|
||||||
<code>{{ row.resolved_url if resolve_urls else row.candidate_url }}</code>
|
|
||||||
</td>
|
|
||||||
<td>
|
|
||||||
{% if not row.has_match %}
|
|
||||||
<span class="badge bg-secondary">No match</span>
|
|
||||||
{% elif row.will_force_ignore_errors %}
|
|
||||||
<span class="badge bg-warning text-dark">Will force update (ignore resolve error)</span>
|
|
||||||
{% elif row.resolution_error %}
|
|
||||||
<span class="badge bg-danger">{{ row.resolution_error }}</span>
|
|
||||||
{% elif row.will_force_overwrite %}
|
|
||||||
<span class="badge bg-warning">Will force overwrite</span>
|
|
||||||
{% elif row.target_exists %}
|
|
||||||
<span class="badge bg-warning text-dark">Conflict: target URL exists</span>
|
|
||||||
{% elif row.will_change %}
|
|
||||||
<span class="badge bg-success">Will update</span>
|
|
||||||
{% else %}
|
|
||||||
<span class="badge bg-secondary">No change</span>
|
|
||||||
{% endif %}
|
|
||||||
</td>
|
|
||||||
</tr>
|
|
||||||
{% endfor %}
|
|
||||||
</tbody>
|
|
||||||
</table>
|
|
||||||
</div>
|
|
||||||
{% elif replace_from %}
|
|
||||||
<p class="small text-muted mb-0">No preview rows found for that replacement pattern.</p>
|
|
||||||
{% endif %}
|
|
||||||
|
|
@ -1,5 +1,6 @@
|
||||||
<!DOCTYPE html>
|
<!DOCTYPE html>
|
||||||
<html lang="en">
|
<html lang="en">
|
||||||
|
|
||||||
<head>
|
<head>
|
||||||
<meta charset="UTF-8" />
|
<meta charset="UTF-8" />
|
||||||
<meta name="viewport" content="width=device-width, initial-scale=1" />
|
<meta name="viewport" content="width=device-width, initial-scale=1" />
|
||||||
|
|
@ -17,6 +18,7 @@
|
||||||
{% block head %}
|
{% block head %}
|
||||||
{% endblock head %}
|
{% endblock head %}
|
||||||
</head>
|
</head>
|
||||||
|
|
||||||
<body class="text-white-50">
|
<body class="text-white-50">
|
||||||
{% include "nav.html" %}
|
{% include "nav.html" %}
|
||||||
<div class="p-2 mb-2">
|
<div class="p-2 mb-2">
|
||||||
|
|
@ -25,12 +27,10 @@
|
||||||
{% if messages %}
|
{% if messages %}
|
||||||
<div class="alert alert-warning alert-dismissible fade show" role="alert">
|
<div class="alert alert-warning alert-dismissible fade show" role="alert">
|
||||||
<pre>{{ messages }}</pre>
|
<pre>{{ messages }}</pre>
|
||||||
<button type="button"
|
<button type="button" class="btn-close" data-bs-dismiss="alert" aria-label="Close"></button>
|
||||||
class="btn-close"
|
|
||||||
data-bs-dismiss="alert"
|
|
||||||
aria-label="Close"></button>
|
|
||||||
</div>
|
</div>
|
||||||
{% endif %}
|
{% endif %}
|
||||||
|
|
||||||
{% block content %}
|
{% block content %}
|
||||||
{% endblock content %}
|
{% endblock content %}
|
||||||
<footer class="d-flex flex-wrap justify-content-between align-items-center py-3 my-4 border-top">
|
<footer class="d-flex flex-wrap justify-content-between align-items-center py-3 my-4 border-top">
|
||||||
|
|
@ -52,9 +52,7 @@
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
<script src="https://cdn.jsdelivr.net/npm/htmx.org@2.0.8/dist/htmx.min.js"
|
|
||||||
integrity="sha384-/TgkGk7p307TH7EXJDuUlgG3Ce1UVolAOFopFekQkkXihi5u/6OCvVKyz1W+idaz"
|
|
||||||
crossorigin="anonymous"></script>
|
|
||||||
<script src="/static/bootstrap.min.js" defer></script>
|
<script src="/static/bootstrap.min.js" defer></script>
|
||||||
</body>
|
</body>
|
||||||
|
|
||||||
</html>
|
</html>
|
||||||
|
|
|
||||||
|
|
@ -32,10 +32,10 @@
|
||||||
{% for hook_from_context in webhooks %}
|
{% for hook_from_context in webhooks %}
|
||||||
<div class="p-2 mb-3 border border-dark">
|
<div class="p-2 mb-3 border border-dark">
|
||||||
<div class="d-flex justify-content-between align-items-center mb-3">
|
<div class="d-flex justify-content-between align-items-center mb-3">
|
||||||
<h2 class="h5 mb-0">{{ hook_from_context.name }}</h2>
|
<h2 class="h5 mb-0">
|
||||||
<a class="text-muted fs-6 btn btn-outline-light btn-sm ms-auto me-2"
|
<a class="text-muted" href="/webhooks">{{ hook_from_context.name }}</a>
|
||||||
href="/webhook_entries?webhook_url={{ hook_from_context.url|encode_url }}">Settings</a>
|
</h2>
|
||||||
<a class="text-muted fs-6 btn btn-outline-light btn-sm"
|
<a class="text-muted"
|
||||||
href="/webhook_entries?webhook_url={{ hook_from_context.url|encode_url }}">View Latest Entries</a>
|
href="/webhook_entries?webhook_url={{ hook_from_context.url|encode_url }}">View Latest Entries</a>
|
||||||
</div>
|
</div>
|
||||||
<!-- Group feeds by domain within each webhook -->
|
<!-- Group feeds by domain within each webhook -->
|
||||||
|
|
|
||||||
|
|
@ -1,149 +1,20 @@
|
||||||
{% extends "base.html" %}
|
{% extends "base.html" %}
|
||||||
{% block title %}
|
{% block title %}
|
||||||
| {{ webhook_name }}
|
| {{ webhook_name }} - Latest Entries
|
||||||
{% endblock title %}
|
{% endblock title %}
|
||||||
{% block content %}
|
{% block content %}
|
||||||
{% if message %}<div class="alert alert-info" role="alert">{{ message }}</div>{% endif %}
|
|
||||||
<div class="card mb-3 border border-dark p-3 text-light">
|
<div class="card mb-3 border border-dark p-3 text-light">
|
||||||
<div class="d-flex flex-column flex-md-row justify-content-between gap-3">
|
<!-- Webhook Title -->
|
||||||
<div>
|
<h2>{{ webhook_name }} - Latest Entries ({{ total_entries }} total from {{ feeds_count }} feeds)</h2>
|
||||||
<h2 class="mb-2">{{ webhook_name }}</h2>
|
<!-- Webhook Info -->
|
||||||
<p class="text-muted mb-1">
|
<div class="mt-3">
|
||||||
{{ total_entries }} total from {{ feeds_count }} feed{{ 's' if feeds_count != 1 else '' }}
|
<p class="text-muted">
|
||||||
</p>
|
|
||||||
<p class="text-muted mb-0">
|
|
||||||
<code>{{ webhook_url }}</code>
|
<code>{{ webhook_url }}</code>
|
||||||
</p>
|
</p>
|
||||||
</div>
|
</div>
|
||||||
<div class="d-flex gap-2 align-items-start">
|
|
||||||
<a class="btn btn-outline-light btn-sm" href="/">Back to dashboard</a>
|
|
||||||
<a class="btn btn-outline-info btn-sm" href="/webhooks">All webhooks</a>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
<div class="row g-3 mb-3">
|
|
||||||
<div class="col-lg-5">
|
|
||||||
<div class="card border border-dark p-3 text-light h-100">
|
|
||||||
<h3 class="h5">Settings</h3>
|
|
||||||
<ul class="list-unstyled text-muted mb-3">
|
|
||||||
<li>
|
|
||||||
<strong>Custom name:</strong> {{ hook_info.custom_name }}
|
|
||||||
</li>
|
|
||||||
<li>
|
|
||||||
<strong>Discord name:</strong> {{ hook_info.name or 'Unavailable' }}
|
|
||||||
</li>
|
|
||||||
<li>
|
|
||||||
<strong>Webhook:</strong>
|
|
||||||
<a class="text-muted" href="{{ hook_info.url }}">{{ hook_info.url | replace('https://discord.com/api/webhooks', '') }}</a>
|
|
||||||
</li>
|
|
||||||
</ul>
|
|
||||||
<form action="/modify_webhook" method="post" class="row g-3 mb-3">
|
|
||||||
<input type="hidden" name="old_hook" value="{{ webhook_url }}" />
|
|
||||||
<input type="hidden"
|
|
||||||
name="redirect_to"
|
|
||||||
value="/webhook_entries?webhook_url={{ webhook_url|encode_url }}" />
|
|
||||||
<div class="col-12">
|
|
||||||
<label for="new_hook" class="form-label">Modify Webhook</label>
|
|
||||||
<input type="text"
|
|
||||||
name="new_hook"
|
|
||||||
id="new_hook"
|
|
||||||
class="form-control border text-muted bg-dark"
|
|
||||||
placeholder="Enter new webhook URL" />
|
|
||||||
</div>
|
|
||||||
<div class="col-12">
|
|
||||||
<button type="submit" class="btn btn-primary w-100">Save Webhook URL</button>
|
|
||||||
</div>
|
|
||||||
</form>
|
|
||||||
<form action="/delete_webhook" method="post">
|
|
||||||
<input type="hidden" name="webhook_url" value="{{ webhook_url }}" />
|
|
||||||
<button type="submit"
|
|
||||||
class="btn btn-danger w-100"
|
|
||||||
onclick="return confirm('Are you sure you want to delete this webhook?');">
|
|
||||||
Delete Webhook
|
|
||||||
</button>
|
|
||||||
</form>
|
|
||||||
<hr class="border-secondary my-3" />
|
|
||||||
<h3 class="h6">Mass update feed URLs</h3>
|
|
||||||
<p class="text-muted small mb-2">Replace part of feed URLs for all feeds attached to this webhook.</p>
|
|
||||||
<form action="/webhook_entries"
|
|
||||||
method="get"
|
|
||||||
class="row g-2 mb-2"
|
|
||||||
hx-get="/webhook_entries_mass_update_preview"
|
|
||||||
hx-target="#mass-update-preview"
|
|
||||||
hx-swap="innerHTML">
|
|
||||||
<input type="hidden" name="webhook_url" value="{{ webhook_url|encode_url }}" />
|
|
||||||
<div class="col-12">
|
|
||||||
<label for="replace_from" class="form-label small">Replace this</label>
|
|
||||||
<input type="text"
|
|
||||||
name="replace_from"
|
|
||||||
id="replace_from"
|
|
||||||
class="form-control border text-muted bg-dark"
|
|
||||||
value="{{ replace_from }}"
|
|
||||||
placeholder="https://old-domain.example" />
|
|
||||||
</div>
|
|
||||||
<div class="col-12">
|
|
||||||
<label for="replace_to" class="form-label small">With this</label>
|
|
||||||
<input type="text"
|
|
||||||
name="replace_to"
|
|
||||||
id="replace_to"
|
|
||||||
class="form-control border text-muted bg-dark"
|
|
||||||
value="{{ replace_to }}"
|
|
||||||
placeholder="https://new-domain.example" />
|
|
||||||
</div>
|
|
||||||
<div class="col-12 form-check ms-1">
|
|
||||||
<input class="form-check-input"
|
|
||||||
type="checkbox"
|
|
||||||
value="true"
|
|
||||||
id="resolve_urls"
|
|
||||||
name="resolve_urls"
|
|
||||||
{% if resolve_urls %}checked{% endif %} />
|
|
||||||
<label class="form-check-label small" for="resolve_urls">Resolve final URL with redirects (uses httpx)</label>
|
|
||||||
</div>
|
|
||||||
<div class="col-12 form-check ms-1">
|
|
||||||
<input class="form-check-input"
|
|
||||||
type="checkbox"
|
|
||||||
value="true"
|
|
||||||
id="force_update"
|
|
||||||
name="force_update"
|
|
||||||
{% if force_update %}checked{% endif %} />
|
|
||||||
<label class="form-check-label small" for="force_update">Force update (overwrite conflicting target feed URLs)</label>
|
|
||||||
</div>
|
|
||||||
<div class="col-12">
|
|
||||||
<button type="submit" class="btn btn-outline-warning w-100">Preview changes</button>
|
|
||||||
</div>
|
|
||||||
</form>
|
|
||||||
<div id="mass-update-preview">{% include "_webhook_mass_update_preview.html" %}</div>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
<div class="col-lg-7">
|
|
||||||
<div class="card border border-dark p-3 text-light h-100">
|
|
||||||
<h3 class="h5">Attached feeds</h3>
|
|
||||||
{% if webhook_feeds %}
|
|
||||||
<ul class="list-group list-unstyled mb-0">
|
|
||||||
{% for feed in webhook_feeds %}
|
|
||||||
<li class="mb-2">
|
|
||||||
<a class="text-muted" href="/feed?feed_url={{ feed.url|encode_url }}">
|
|
||||||
{% if feed.title %}
|
|
||||||
{{ feed.title }}
|
|
||||||
{% else %}
|
|
||||||
{{ feed.url }}
|
|
||||||
{% endif %}
|
|
||||||
</a>
|
|
||||||
{% if feed.title %}<span class="text-muted">- {{ feed.url }}</span>{% endif %}
|
|
||||||
{% if not feed.updates_enabled %}<span class="text-warning">Disabled</span>{% endif %}
|
|
||||||
{% if feed.last_exception %}<span class="text-danger">({{ feed.last_exception.value_str }})</span>{% endif %}
|
|
||||||
</li>
|
|
||||||
{% endfor %}
|
|
||||||
</ul>
|
|
||||||
{% else %}
|
|
||||||
<p class="text-muted mb-0">No feeds are attached to this webhook yet.</p>
|
|
||||||
{% endif %}
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</div>
|
</div>
|
||||||
{# Rendered HTML content #}
|
{# Rendered HTML content #}
|
||||||
{% if entries %}
|
{% if entries %}
|
||||||
<h3 class="h5 text-light">Latest entries</h3>
|
|
||||||
<pre>{{ html|safe }}</pre>
|
<pre>{{ html|safe }}</pre>
|
||||||
{% if is_show_more_entries_button_visible and last_entry %}
|
{% if is_show_more_entries_button_visible and last_entry %}
|
||||||
<a class="btn btn-dark mt-3"
|
<a class="btn btn-dark mt-3"
|
||||||
|
|
|
||||||
|
|
@ -6,24 +6,10 @@ import sys
|
||||||
import tempfile
|
import tempfile
|
||||||
from contextlib import suppress
|
from contextlib import suppress
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
from typing import TYPE_CHECKING
|
|
||||||
from typing import Any
|
from typing import Any
|
||||||
|
|
||||||
if TYPE_CHECKING:
|
|
||||||
import pytest
|
|
||||||
|
|
||||||
|
def pytest_configure() -> None:
|
||||||
def pytest_addoption(parser: pytest.Parser) -> None:
|
|
||||||
"""Register custom command-line options for optional integration tests."""
|
|
||||||
parser.addoption(
|
|
||||||
"--run-real-git-backup-tests",
|
|
||||||
action="store_true",
|
|
||||||
default=False,
|
|
||||||
help="Run tests that push git backup state to a real repository.",
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def pytest_sessionstart(session: pytest.Session) -> None:
|
|
||||||
"""Isolate persistent app state per xdist worker to avoid cross-worker test interference."""
|
"""Isolate persistent app state per xdist worker to avoid cross-worker test interference."""
|
||||||
worker_id: str = os.environ.get("PYTEST_XDIST_WORKER", "gw0")
|
worker_id: str = os.environ.get("PYTEST_XDIST_WORKER", "gw0")
|
||||||
worker_data_dir: Path = Path(tempfile.gettempdir()) / "discord-rss-bot-tests" / worker_id
|
worker_data_dir: Path = Path(tempfile.gettempdir()) / "discord-rss-bot-tests" / worker_id
|
||||||
|
|
@ -51,10 +37,4 @@ def pytest_sessionstart(session: pytest.Session) -> None:
|
||||||
current_reader.close()
|
current_reader.close()
|
||||||
get_reader: Any = getattr(settings_module, "get_reader", None)
|
get_reader: Any = getattr(settings_module, "get_reader", None)
|
||||||
if callable(get_reader):
|
if callable(get_reader):
|
||||||
get_reader()
|
main_module.reader = get_reader()
|
||||||
|
|
||||||
|
|
||||||
def pytest_collection_modifyitems(config: pytest.Config, items: list[pytest.Item]) -> None:
|
|
||||||
"""Skip real git-repo push tests unless explicitly requested."""
|
|
||||||
if config.getoption("--run-real-git-backup-tests"):
|
|
||||||
return
|
|
||||||
|
|
|
||||||
|
|
@ -38,7 +38,7 @@ def test_has_black_tags() -> None:
|
||||||
|
|
||||||
# Test feed without any blacklist tags
|
# Test feed without any blacklist tags
|
||||||
assert_msg: str = "Feed should not have any blacklist tags"
|
assert_msg: str = "Feed should not have any blacklist tags"
|
||||||
assert feed_has_blacklist_tags(reader=get_reader(), feed=feed) is False, assert_msg
|
assert feed_has_blacklist_tags(custom_reader=get_reader(), feed=feed) is False, assert_msg
|
||||||
|
|
||||||
check_if_has_tag(reader, feed, "blacklist_title")
|
check_if_has_tag(reader, feed, "blacklist_title")
|
||||||
check_if_has_tag(reader, feed, "blacklist_summary")
|
check_if_has_tag(reader, feed, "blacklist_summary")
|
||||||
|
|
@ -58,11 +58,11 @@ def test_has_black_tags() -> None:
|
||||||
def check_if_has_tag(reader: Reader, feed: Feed, blacklist_name: str) -> None:
|
def check_if_has_tag(reader: Reader, feed: Feed, blacklist_name: str) -> None:
|
||||||
reader.set_tag(feed, blacklist_name, "a") # pyright: ignore[reportArgumentType]
|
reader.set_tag(feed, blacklist_name, "a") # pyright: ignore[reportArgumentType]
|
||||||
assert_msg: str = f"Feed should have blacklist tags: {blacklist_name}"
|
assert_msg: str = f"Feed should have blacklist tags: {blacklist_name}"
|
||||||
assert feed_has_blacklist_tags(reader=reader, feed=feed) is True, assert_msg
|
assert feed_has_blacklist_tags(custom_reader=reader, feed=feed) is True, assert_msg
|
||||||
|
|
||||||
asset_msg: str = f"Feed should not have any blacklist tags: {blacklist_name}"
|
asset_msg: str = f"Feed should not have any blacklist tags: {blacklist_name}"
|
||||||
reader.delete_tag(feed, blacklist_name)
|
reader.delete_tag(feed, blacklist_name)
|
||||||
assert feed_has_blacklist_tags(reader=reader, feed=feed) is False, asset_msg
|
assert feed_has_blacklist_tags(custom_reader=reader, feed=feed) is False, asset_msg
|
||||||
|
|
||||||
|
|
||||||
def test_should_be_skipped() -> None:
|
def test_should_be_skipped() -> None:
|
||||||
|
|
|
||||||
|
|
@ -45,39 +45,39 @@ def test_entry_is_whitelisted() -> None:
|
||||||
Path.mkdir(Path(temp_dir), exist_ok=True)
|
Path.mkdir(Path(temp_dir), exist_ok=True)
|
||||||
|
|
||||||
custom_loc: pathlib.Path = pathlib.Path(temp_dir, "custom_loc_db.sqlite")
|
custom_loc: pathlib.Path = pathlib.Path(temp_dir, "custom_loc_db.sqlite")
|
||||||
reader: Reader = get_reader(custom_location=str(custom_loc))
|
custom_reader: Reader = get_reader(custom_location=str(custom_loc))
|
||||||
|
|
||||||
# Add a feed to the database.
|
# Add a feed to the database.
|
||||||
reader.add_feed("https://lovinator.space/rss_test.xml")
|
custom_reader.add_feed("https://lovinator.space/rss_test.xml")
|
||||||
reader.update_feed("https://lovinator.space/rss_test.xml")
|
custom_reader.update_feed("https://lovinator.space/rss_test.xml")
|
||||||
|
|
||||||
# whitelist_title
|
# whitelist_title
|
||||||
reader.set_tag("https://lovinator.space/rss_test.xml", "whitelist_title", "fvnnnfnfdnfdnfd") # pyright: ignore[reportArgumentType]
|
custom_reader.set_tag("https://lovinator.space/rss_test.xml", "whitelist_title", "fvnnnfnfdnfdnfd") # pyright: ignore[reportArgumentType]
|
||||||
for entry in reader.get_entries():
|
for entry in custom_reader.get_entries():
|
||||||
if entry_is_whitelisted(entry, reader=reader) is True:
|
if entry_is_whitelisted(entry) is True:
|
||||||
assert entry.title == "fvnnnfnfdnfdnfd", f"Expected: fvnnnfnfdnfdnfd, Got: {entry.title}"
|
assert entry.title == "fvnnnfnfdnfdnfd", f"Expected: fvnnnfnfdnfdnfd, Got: {entry.title}"
|
||||||
break
|
break
|
||||||
reader.delete_tag("https://lovinator.space/rss_test.xml", "whitelist_title")
|
custom_reader.delete_tag("https://lovinator.space/rss_test.xml", "whitelist_title")
|
||||||
|
|
||||||
# whitelist_summary
|
# whitelist_summary
|
||||||
reader.set_tag("https://lovinator.space/rss_test.xml", "whitelist_summary", "fvnnnfnfdnfdnfd") # pyright: ignore[reportArgumentType]
|
custom_reader.set_tag("https://lovinator.space/rss_test.xml", "whitelist_summary", "fvnnnfnfdnfdnfd") # pyright: ignore[reportArgumentType]
|
||||||
for entry in reader.get_entries():
|
for entry in custom_reader.get_entries():
|
||||||
if entry_is_whitelisted(entry, reader=reader) is True:
|
if entry_is_whitelisted(entry) is True:
|
||||||
assert entry.summary == "fvnnnfnfdnfdnfd", f"Expected: fvnnnfnfdnfdnfd, Got: {entry.summary}"
|
assert entry.summary == "fvnnnfnfdnfdnfd", f"Expected: fvnnnfnfdnfdnfd, Got: {entry.summary}"
|
||||||
break
|
break
|
||||||
reader.delete_tag("https://lovinator.space/rss_test.xml", "whitelist_summary")
|
custom_reader.delete_tag("https://lovinator.space/rss_test.xml", "whitelist_summary")
|
||||||
|
|
||||||
# whitelist_content
|
# whitelist_content
|
||||||
reader.set_tag("https://lovinator.space/rss_test.xml", "whitelist_content", "fvnnnfnfdnfdnfd") # pyright: ignore[reportArgumentType]
|
custom_reader.set_tag("https://lovinator.space/rss_test.xml", "whitelist_content", "fvnnnfnfdnfdnfd") # pyright: ignore[reportArgumentType]
|
||||||
for entry in reader.get_entries():
|
for entry in custom_reader.get_entries():
|
||||||
if entry_is_whitelisted(entry, reader=reader) is True:
|
if entry_is_whitelisted(entry) is True:
|
||||||
assert_msg = f"Expected: <p>ffdnfdnfdnfdnfdndfn</p>, Got: {entry.content[0].value}"
|
assert_msg = f"Expected: <p>ffdnfdnfdnfdnfdndfn</p>, Got: {entry.content[0].value}"
|
||||||
assert entry.content[0].value == "<p>ffdnfdnfdnfdnfdndfn</p>", assert_msg
|
assert entry.content[0].value == "<p>ffdnfdnfdnfdnfdndfn</p>", assert_msg
|
||||||
break
|
break
|
||||||
reader.delete_tag("https://lovinator.space/rss_test.xml", "whitelist_content")
|
custom_reader.delete_tag("https://lovinator.space/rss_test.xml", "whitelist_content")
|
||||||
|
|
||||||
# Close the reader, so we can delete the directory.
|
# Close the reader, so we can delete the directory.
|
||||||
reader.close()
|
custom_reader.close()
|
||||||
|
|
||||||
|
|
||||||
def test_entry_is_blacklisted() -> None:
|
def test_entry_is_blacklisted() -> None:
|
||||||
|
|
@ -87,36 +87,36 @@ def test_entry_is_blacklisted() -> None:
|
||||||
Path.mkdir(Path(temp_dir), exist_ok=True)
|
Path.mkdir(Path(temp_dir), exist_ok=True)
|
||||||
|
|
||||||
custom_loc: pathlib.Path = pathlib.Path(temp_dir, "custom_loc_db.sqlite")
|
custom_loc: pathlib.Path = pathlib.Path(temp_dir, "custom_loc_db.sqlite")
|
||||||
reader: Reader = get_reader(custom_location=str(custom_loc))
|
custom_reader: Reader = get_reader(custom_location=str(custom_loc))
|
||||||
|
|
||||||
# Add a feed to the database.
|
# Add a feed to the database.
|
||||||
reader.add_feed("https://lovinator.space/rss_test.xml")
|
custom_reader.add_feed("https://lovinator.space/rss_test.xml")
|
||||||
reader.update_feed("https://lovinator.space/rss_test.xml")
|
custom_reader.update_feed("https://lovinator.space/rss_test.xml")
|
||||||
|
|
||||||
# blacklist_title
|
# blacklist_title
|
||||||
reader.set_tag("https://lovinator.space/rss_test.xml", "blacklist_title", "fvnnnfnfdnfdnfd") # pyright: ignore[reportArgumentType]
|
custom_reader.set_tag("https://lovinator.space/rss_test.xml", "blacklist_title", "fvnnnfnfdnfdnfd") # pyright: ignore[reportArgumentType]
|
||||||
for entry in reader.get_entries():
|
for entry in custom_reader.get_entries():
|
||||||
if entry_is_blacklisted(entry, reader=reader) is True:
|
if entry_is_blacklisted(entry) is True:
|
||||||
assert entry.title == "fvnnnfnfdnfdnfd", f"Expected: fvnnnfnfdnfdnfd, Got: {entry.title}"
|
assert entry.title == "fvnnnfnfdnfdnfd", f"Expected: fvnnnfnfdnfdnfd, Got: {entry.title}"
|
||||||
break
|
break
|
||||||
reader.delete_tag("https://lovinator.space/rss_test.xml", "blacklist_title")
|
custom_reader.delete_tag("https://lovinator.space/rss_test.xml", "blacklist_title")
|
||||||
|
|
||||||
# blacklist_summary
|
# blacklist_summary
|
||||||
reader.set_tag("https://lovinator.space/rss_test.xml", "blacklist_summary", "fvnnnfnfdnfdnfd") # pyright: ignore[reportArgumentType]
|
custom_reader.set_tag("https://lovinator.space/rss_test.xml", "blacklist_summary", "fvnnnfnfdnfdnfd") # pyright: ignore[reportArgumentType]
|
||||||
for entry in reader.get_entries():
|
for entry in custom_reader.get_entries():
|
||||||
if entry_is_blacklisted(entry, reader=reader) is True:
|
if entry_is_blacklisted(entry) is True:
|
||||||
assert entry.summary == "fvnnnfnfdnfdnfd", f"Expected: fvnnnfnfdnfdnfd, Got: {entry.summary}"
|
assert entry.summary == "fvnnnfnfdnfdnfd", f"Expected: fvnnnfnfdnfdnfd, Got: {entry.summary}"
|
||||||
break
|
break
|
||||||
reader.delete_tag("https://lovinator.space/rss_test.xml", "blacklist_summary")
|
custom_reader.delete_tag("https://lovinator.space/rss_test.xml", "blacklist_summary")
|
||||||
|
|
||||||
# blacklist_content
|
# blacklist_content
|
||||||
reader.set_tag("https://lovinator.space/rss_test.xml", "blacklist_content", "fvnnnfnfdnfdnfd") # pyright: ignore[reportArgumentType]
|
custom_reader.set_tag("https://lovinator.space/rss_test.xml", "blacklist_content", "fvnnnfnfdnfdnfd") # pyright: ignore[reportArgumentType]
|
||||||
for entry in reader.get_entries():
|
for entry in custom_reader.get_entries():
|
||||||
if entry_is_blacklisted(entry, reader=reader) is True:
|
if entry_is_blacklisted(entry) is True:
|
||||||
assert_msg = f"Expected: <p>ffdnfdnfdnfdnfdndfn</p>, Got: {entry.content[0].value}"
|
assert_msg = f"Expected: <p>ffdnfdnfdnfdnfdndfn</p>, Got: {entry.content[0].value}"
|
||||||
assert entry.content[0].value == "<p>ffdnfdnfdnfdnfdndfn</p>", assert_msg
|
assert entry.content[0].value == "<p>ffdnfdnfdnfdnfdndfn</p>", assert_msg
|
||||||
break
|
break
|
||||||
reader.delete_tag("https://lovinator.space/rss_test.xml", "blacklist_content")
|
custom_reader.delete_tag("https://lovinator.space/rss_test.xml", "blacklist_content")
|
||||||
|
|
||||||
# Close the reader, so we can delete the directory.
|
# Close the reader, so we can delete the directory.
|
||||||
reader.close()
|
custom_reader.close()
|
||||||
|
|
|
||||||
|
|
@ -102,10 +102,12 @@ def test_format_entry_html_for_discord_does_not_preserve_invalid_timestamp_style
|
||||||
|
|
||||||
|
|
||||||
@patch("discord_rss_bot.custom_message.get_custom_message")
|
@patch("discord_rss_bot.custom_message.get_custom_message")
|
||||||
|
@patch("discord_rss_bot.custom_message.get_reader")
|
||||||
def test_replace_tags_in_text_message_preserves_timestamp_tags(
|
def test_replace_tags_in_text_message_preserves_timestamp_tags(
|
||||||
|
mock_get_reader: MagicMock,
|
||||||
mock_get_custom_message: MagicMock,
|
mock_get_custom_message: MagicMock,
|
||||||
) -> None:
|
) -> None:
|
||||||
mock_reader = MagicMock()
|
mock_get_reader.return_value = MagicMock()
|
||||||
mock_get_custom_message.return_value = "{{entry_summary}}"
|
mock_get_custom_message.return_value = "{{entry_summary}}"
|
||||||
summary_parts: list[str] = [
|
summary_parts: list[str] = [
|
||||||
f"<p>Format {index}: ({timestamp_tag.replace('<', '<').replace('>', '>')})</p>"
|
f"<p>Format {index}: ({timestamp_tag.replace('<', '<').replace('>', '>')})</p>"
|
||||||
|
|
@ -114,17 +116,19 @@ def test_replace_tags_in_text_message_preserves_timestamp_tags(
|
||||||
entry_ns: SimpleNamespace = make_entry("".join(summary_parts))
|
entry_ns: SimpleNamespace = make_entry("".join(summary_parts))
|
||||||
|
|
||||||
entry: Entry = typing.cast("Entry", entry_ns)
|
entry: Entry = typing.cast("Entry", entry_ns)
|
||||||
rendered: str = replace_tags_in_text_message(entry, reader=mock_reader)
|
rendered: str = replace_tags_in_text_message(entry)
|
||||||
|
|
||||||
for timestamp_tag in TIMESTAMP_FORMATS:
|
for timestamp_tag in TIMESTAMP_FORMATS:
|
||||||
assert timestamp_tag in rendered
|
assert timestamp_tag in rendered
|
||||||
|
|
||||||
|
|
||||||
@patch("discord_rss_bot.custom_message.get_embed")
|
@patch("discord_rss_bot.custom_message.get_embed")
|
||||||
|
@patch("discord_rss_bot.custom_message.get_reader")
|
||||||
def test_replace_tags_in_embed_preserves_timestamp_tags(
|
def test_replace_tags_in_embed_preserves_timestamp_tags(
|
||||||
|
mock_get_reader: MagicMock,
|
||||||
mock_get_embed: MagicMock,
|
mock_get_embed: MagicMock,
|
||||||
) -> None:
|
) -> None:
|
||||||
mock_reader = MagicMock()
|
mock_get_reader.return_value = MagicMock()
|
||||||
mock_get_embed.return_value = CustomEmbed(description="{{entry_summary}}")
|
mock_get_embed.return_value = CustomEmbed(description="{{entry_summary}}")
|
||||||
summary_parts: list[str] = [
|
summary_parts: list[str] = [
|
||||||
f"<p>Format {index}: ({timestamp_tag.replace('<', '<').replace('>', '>')})</p>"
|
f"<p>Format {index}: ({timestamp_tag.replace('<', '<').replace('>', '>')})</p>"
|
||||||
|
|
@ -134,7 +138,7 @@ def test_replace_tags_in_embed_preserves_timestamp_tags(
|
||||||
|
|
||||||
entry: Entry = typing.cast("Entry", entry_ns)
|
entry: Entry = typing.cast("Entry", entry_ns)
|
||||||
|
|
||||||
embed: CustomEmbed = replace_tags_in_embed(entry_ns.feed, entry, reader=mock_reader)
|
embed: CustomEmbed = replace_tags_in_embed(entry_ns.feed, entry)
|
||||||
|
|
||||||
for timestamp_tag in TIMESTAMP_FORMATS:
|
for timestamp_tag in TIMESTAMP_FORMATS:
|
||||||
assert timestamp_tag in embed.description
|
assert timestamp_tag in embed.description
|
||||||
|
|
|
||||||
|
|
@ -18,6 +18,7 @@ from discord_rss_bot.feeds import send_entry_to_discord
|
||||||
from discord_rss_bot.feeds import send_to_discord
|
from discord_rss_bot.feeds import send_to_discord
|
||||||
from discord_rss_bot.feeds import should_send_embed_check
|
from discord_rss_bot.feeds import should_send_embed_check
|
||||||
from discord_rss_bot.feeds import truncate_webhook_message
|
from discord_rss_bot.feeds import truncate_webhook_message
|
||||||
|
from discord_rss_bot.missing_tags import add_missing_tags
|
||||||
|
|
||||||
|
|
||||||
def test_send_to_discord() -> None:
|
def test_send_to_discord() -> None:
|
||||||
|
|
@ -34,6 +35,8 @@ def test_send_to_discord() -> None:
|
||||||
# Add a feed to the reader.
|
# Add a feed to the reader.
|
||||||
reader.add_feed("https://www.reddit.com/r/Python/.rss")
|
reader.add_feed("https://www.reddit.com/r/Python/.rss")
|
||||||
|
|
||||||
|
add_missing_tags(reader)
|
||||||
|
|
||||||
# Update the feed to get the entries.
|
# Update the feed to get the entries.
|
||||||
reader.update_feeds()
|
reader.update_feeds()
|
||||||
|
|
||||||
|
|
@ -55,7 +58,7 @@ def test_send_to_discord() -> None:
|
||||||
assert reader.get_tag(feed, "webhook") == webhook_url, f"The webhook URL should be '{webhook_url}'."
|
assert reader.get_tag(feed, "webhook") == webhook_url, f"The webhook URL should be '{webhook_url}'."
|
||||||
|
|
||||||
# Send the feed to Discord.
|
# Send the feed to Discord.
|
||||||
send_to_discord(reader=reader, feed=feed, do_once=True)
|
send_to_discord(custom_reader=reader, feed=feed, do_once=True)
|
||||||
|
|
||||||
# Close the reader, so we can delete the directory.
|
# Close the reader, so we can delete the directory.
|
||||||
reader.close()
|
reader.close()
|
||||||
|
|
@ -188,7 +191,7 @@ def test_send_entry_to_discord_youtube_feed(
|
||||||
mock_discord_webhook.return_value = mock_webhook
|
mock_discord_webhook.return_value = mock_webhook
|
||||||
|
|
||||||
# Call the function
|
# Call the function
|
||||||
send_entry_to_discord(mock_entry, mock_reader)
|
send_entry_to_discord(mock_entry)
|
||||||
|
|
||||||
# Assertions
|
# Assertions
|
||||||
mock_create_embed.assert_not_called()
|
mock_create_embed.assert_not_called()
|
||||||
|
|
@ -200,7 +203,7 @@ def test_send_entry_to_discord_youtube_feed(
|
||||||
assert webhook_call_kwargs["url"] == "https://discord.com/api/webhooks/123/abc"
|
assert webhook_call_kwargs["url"] == "https://discord.com/api/webhooks/123/abc"
|
||||||
|
|
||||||
# Verify execute_webhook was called
|
# Verify execute_webhook was called
|
||||||
mock_execute_webhook.assert_called_once_with(mock_webhook, mock_entry, reader=mock_reader)
|
mock_execute_webhook.assert_called_once_with(mock_webhook, mock_entry)
|
||||||
|
|
||||||
|
|
||||||
def test_extract_domain_youtube_feed() -> None:
|
def test_extract_domain_youtube_feed() -> None:
|
||||||
|
|
|
||||||
|
|
@ -304,6 +304,102 @@ def test_commit_state_change_no_push_when_remote_unset(monkeypatch: pytest.Monke
|
||||||
assert not push_calls, "git push should NOT be called when GIT_BACKUP_REMOTE is not set"
|
assert not push_calls, "git push should NOT be called when GIT_BACKUP_REMOTE is not set"
|
||||||
|
|
||||||
|
|
||||||
|
@SKIP_IF_NO_GIT
|
||||||
|
def test_commit_state_change_e2e_push_to_bare_repo(monkeypatch: pytest.MonkeyPatch, tmp_path: Path) -> None:
|
||||||
|
"""End-to-end test: commit_state_change pushes to a real bare git repository."""
|
||||||
|
git_executable: str | None = shutil.which("git")
|
||||||
|
assert git_executable is not None, "git executable not found"
|
||||||
|
|
||||||
|
# Create a bare remote repository
|
||||||
|
bare_repo_path: Path = tmp_path / "remote.git"
|
||||||
|
subprocess.run([git_executable, "init", "--bare", str(bare_repo_path)], check=True, capture_output=True) # noqa: S603
|
||||||
|
|
||||||
|
# Configure backup with remote pointing to bare repo
|
||||||
|
backup_path: Path = tmp_path / "backup"
|
||||||
|
monkeypatch.setenv("GIT_BACKUP_PATH", str(backup_path))
|
||||||
|
monkeypatch.setenv("GIT_BACKUP_REMOTE", str(bare_repo_path))
|
||||||
|
|
||||||
|
# Create mock reader with some state
|
||||||
|
mock_reader = MagicMock()
|
||||||
|
feed1 = MagicMock()
|
||||||
|
feed1.url = "https://example.com/feed.rss"
|
||||||
|
mock_reader.get_feeds.return_value = [feed1]
|
||||||
|
|
||||||
|
def get_tag_side_effect(
|
||||||
|
feed_or_key: tuple | str,
|
||||||
|
tag: str | None = None,
|
||||||
|
default: str | None = None,
|
||||||
|
) -> list[Any] | str | None:
|
||||||
|
if feed_or_key == ():
|
||||||
|
return []
|
||||||
|
if tag == "webhook":
|
||||||
|
return "https://discord.com/api/webhooks/123/abc"
|
||||||
|
return default
|
||||||
|
|
||||||
|
mock_reader.get_tag.side_effect = get_tag_side_effect
|
||||||
|
|
||||||
|
# Perform backup with commit and push
|
||||||
|
commit_state_change(mock_reader, "Initial backup")
|
||||||
|
|
||||||
|
# Verify commit exists in local backup repo
|
||||||
|
result: subprocess.CompletedProcess[str] = subprocess.run( # noqa: S603
|
||||||
|
[git_executable, "-C", str(backup_path), "log", "--oneline"],
|
||||||
|
capture_output=True,
|
||||||
|
text=True,
|
||||||
|
check=True,
|
||||||
|
)
|
||||||
|
assert "Initial backup" in result.stdout
|
||||||
|
|
||||||
|
# Verify origin remote is configured correctly
|
||||||
|
result = subprocess.run( # noqa: S603
|
||||||
|
[git_executable, "-C", str(backup_path), "remote", "get-url", "origin"],
|
||||||
|
capture_output=True,
|
||||||
|
text=True,
|
||||||
|
check=True,
|
||||||
|
)
|
||||||
|
assert result.stdout.strip() == str(bare_repo_path)
|
||||||
|
|
||||||
|
# Verify commit was pushed to the bare remote
|
||||||
|
result = subprocess.run( # noqa: S603
|
||||||
|
[git_executable, "-C", str(bare_repo_path), "log", "--oneline", "master"],
|
||||||
|
capture_output=True,
|
||||||
|
text=True,
|
||||||
|
check=True,
|
||||||
|
)
|
||||||
|
assert "Initial backup" in result.stdout
|
||||||
|
|
||||||
|
# Verify state.json content in the remote
|
||||||
|
result = subprocess.run( # noqa: S603
|
||||||
|
[git_executable, "-C", str(bare_repo_path), "show", "master:state.json"],
|
||||||
|
capture_output=True,
|
||||||
|
text=True,
|
||||||
|
check=True,
|
||||||
|
)
|
||||||
|
state_data: dict[str, Any] = json.loads(result.stdout)
|
||||||
|
assert state_data["feeds"][0]["url"] == "https://example.com/feed.rss"
|
||||||
|
assert state_data["feeds"][0]["webhook"] == "https://discord.com/api/webhooks/123/abc"
|
||||||
|
|
||||||
|
# Perform a second backup to verify subsequent pushes work
|
||||||
|
feed2 = MagicMock()
|
||||||
|
feed2.url = "https://another.com/feed.xml"
|
||||||
|
mock_reader.get_feeds.return_value = [feed1, feed2]
|
||||||
|
|
||||||
|
commit_state_change(mock_reader, "Add second feed")
|
||||||
|
|
||||||
|
# Verify both commits are in the remote
|
||||||
|
result = subprocess.run( # noqa: S603
|
||||||
|
[git_executable, "-C", str(bare_repo_path), "log", "--oneline", "master"],
|
||||||
|
capture_output=True,
|
||||||
|
text=True,
|
||||||
|
check=True,
|
||||||
|
)
|
||||||
|
assert "Initial backup" in result.stdout
|
||||||
|
assert "Add second feed" in result.stdout
|
||||||
|
|
||||||
|
|
||||||
|
# Integration tests for embed-related endpoint backups
|
||||||
|
|
||||||
|
|
||||||
client: TestClient = TestClient(app)
|
client: TestClient = TestClient(app)
|
||||||
test_webhook_name: str = "Test Backup Webhook"
|
test_webhook_name: str = "Test Backup Webhook"
|
||||||
test_webhook_url: str = "https://discord.com/api/webhooks/999999999/testbackupwebhook"
|
test_webhook_url: str = "https://discord.com/api/webhooks/999999999/testbackupwebhook"
|
||||||
|
|
|
||||||
|
|
@ -4,19 +4,13 @@ import re
|
||||||
import urllib.parse
|
import urllib.parse
|
||||||
from dataclasses import dataclass
|
from dataclasses import dataclass
|
||||||
from dataclasses import field
|
from dataclasses import field
|
||||||
from datetime import UTC
|
|
||||||
from datetime import datetime
|
|
||||||
from typing import TYPE_CHECKING
|
from typing import TYPE_CHECKING
|
||||||
from typing import cast
|
from typing import cast
|
||||||
from unittest.mock import MagicMock
|
|
||||||
from unittest.mock import patch
|
|
||||||
|
|
||||||
from fastapi.testclient import TestClient
|
from fastapi.testclient import TestClient
|
||||||
|
|
||||||
import discord_rss_bot.main as main_module
|
|
||||||
from discord_rss_bot.main import app
|
from discord_rss_bot.main import app
|
||||||
from discord_rss_bot.main import create_html_for_feed
|
from discord_rss_bot.main import create_html_for_feed
|
||||||
from discord_rss_bot.main import get_reader_dependency
|
|
||||||
|
|
||||||
if TYPE_CHECKING:
|
if TYPE_CHECKING:
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
|
|
@ -160,9 +154,6 @@ def test_get() -> None:
|
||||||
response: Response = client.get(url="/webhooks")
|
response: Response = client.get(url="/webhooks")
|
||||||
assert response.status_code == 200, f"/webhooks failed: {response.text}"
|
assert response.status_code == 200, f"/webhooks failed: {response.text}"
|
||||||
|
|
||||||
response = client.get(url="/webhook_entries", params={"webhook_url": webhook_url})
|
|
||||||
assert response.status_code == 200, f"/webhook_entries failed: {response.text}"
|
|
||||||
|
|
||||||
response: Response = client.get(url="/whitelist", params={"feed_url": encoded_feed_url(feed_url)})
|
response: Response = client.get(url="/whitelist", params={"feed_url": encoded_feed_url(feed_url)})
|
||||||
assert response.status_code == 200, f"/whitelist failed: {response.text}"
|
assert response.status_code == 200, f"/whitelist failed: {response.text}"
|
||||||
|
|
||||||
|
|
@ -308,147 +299,6 @@ def test_change_feed_url() -> None:
|
||||||
client.post(url="/remove", data={"feed_url": new_feed_url})
|
client.post(url="/remove", data={"feed_url": new_feed_url})
|
||||||
|
|
||||||
|
|
||||||
def test_change_feed_url_marks_entries_as_read() -> None:
|
|
||||||
"""After changing a feed URL all entries on the new feed should be marked read to prevent resending."""
|
|
||||||
new_feed_url = "https://lovinator.space/rss_test_small.xml"
|
|
||||||
|
|
||||||
# Ensure feeds do not already exist.
|
|
||||||
client.post(url="/remove", data={"feed_url": feed_url})
|
|
||||||
client.post(url="/remove", data={"feed_url": new_feed_url})
|
|
||||||
|
|
||||||
# Ensure webhook exists.
|
|
||||||
client.post(url="/delete_webhook", data={"webhook_url": webhook_url})
|
|
||||||
client.post(url="/add_webhook", data={"webhook_name": webhook_name, "webhook_url": webhook_url})
|
|
||||||
|
|
||||||
# Add the original feed.
|
|
||||||
response: Response = client.post(url="/add", data={"feed_url": feed_url, "webhook_dropdown": webhook_name})
|
|
||||||
assert response.status_code == 200, f"Failed to add feed: {response.text}"
|
|
||||||
|
|
||||||
# Patch reader on the main module so we can observe calls.
|
|
||||||
mock_entry_a = MagicMock()
|
|
||||||
mock_entry_a.id = "entry-a"
|
|
||||||
mock_entry_b = MagicMock()
|
|
||||||
mock_entry_b.id = "entry-b"
|
|
||||||
|
|
||||||
real_reader = main_module.get_reader_dependency()
|
|
||||||
|
|
||||||
# Use a no-redirect client so the POST response is inspected directly; the
|
|
||||||
# redirect target (/feed?feed_url=…) would 404 because change_feed_url is mocked.
|
|
||||||
no_redirect_client = TestClient(app, follow_redirects=False)
|
|
||||||
|
|
||||||
with (
|
|
||||||
patch.object(real_reader, "get_entries", return_value=[mock_entry_a, mock_entry_b]) as mock_get_entries,
|
|
||||||
patch.object(real_reader, "set_entry_read") as mock_set_read,
|
|
||||||
patch.object(real_reader, "update_feed") as mock_update_feed,
|
|
||||||
patch.object(real_reader, "change_feed_url"),
|
|
||||||
):
|
|
||||||
response = no_redirect_client.post(
|
|
||||||
url="/change_feed_url",
|
|
||||||
data={"old_feed_url": feed_url, "new_feed_url": new_feed_url},
|
|
||||||
)
|
|
||||||
assert response.status_code == 303, f"Expected 303 redirect, got {response.status_code}: {response.text}"
|
|
||||||
|
|
||||||
# update_feed should have been called with the new URL.
|
|
||||||
mock_update_feed.assert_called_once_with(new_feed_url)
|
|
||||||
|
|
||||||
# get_entries should have been called to fetch unread entries on the new URL.
|
|
||||||
mock_get_entries.assert_called_once_with(feed=new_feed_url, read=False)
|
|
||||||
|
|
||||||
# Every returned entry should have been marked as read.
|
|
||||||
assert mock_set_read.call_count == 2, f"Expected 2 set_entry_read calls, got {mock_set_read.call_count}"
|
|
||||||
mock_set_read.assert_any_call(mock_entry_a, True)
|
|
||||||
mock_set_read.assert_any_call(mock_entry_b, True)
|
|
||||||
|
|
||||||
# Cleanup.
|
|
||||||
client.post(url="/remove", data={"feed_url": feed_url})
|
|
||||||
client.post(url="/remove", data={"feed_url": new_feed_url})
|
|
||||||
|
|
||||||
|
|
||||||
def test_change_feed_url_empty_old_url_returns_400() -> None:
|
|
||||||
"""Submitting an empty old_feed_url should return HTTP 400."""
|
|
||||||
response: Response = client.post(
|
|
||||||
url="/change_feed_url",
|
|
||||||
data={"old_feed_url": " ", "new_feed_url": "https://example.com/feed.xml"},
|
|
||||||
)
|
|
||||||
assert response.status_code == 400, f"Expected 400 for empty old URL, got {response.status_code}"
|
|
||||||
|
|
||||||
|
|
||||||
def test_change_feed_url_empty_new_url_returns_400() -> None:
|
|
||||||
"""Submitting a blank new_feed_url should return HTTP 400."""
|
|
||||||
response: Response = client.post(
|
|
||||||
url="/change_feed_url",
|
|
||||||
data={"old_feed_url": feed_url, "new_feed_url": " "},
|
|
||||||
)
|
|
||||||
assert response.status_code == 400, f"Expected 400 for blank new URL, got {response.status_code}"
|
|
||||||
|
|
||||||
|
|
||||||
def test_change_feed_url_nonexistent_old_url_returns_404() -> None:
|
|
||||||
"""Trying to rename a feed that does not exist should return HTTP 404."""
|
|
||||||
non_existent = "https://does-not-exist.example.com/rss.xml"
|
|
||||||
# Make sure it really is absent.
|
|
||||||
client.post(url="/remove", data={"feed_url": non_existent})
|
|
||||||
|
|
||||||
response: Response = client.post(
|
|
||||||
url="/change_feed_url",
|
|
||||||
data={"old_feed_url": non_existent, "new_feed_url": "https://example.com/new.xml"},
|
|
||||||
)
|
|
||||||
assert response.status_code == 404, f"Expected 404 for non-existent feed, got {response.status_code}"
|
|
||||||
|
|
||||||
|
|
||||||
def test_change_feed_url_new_url_already_exists_returns_409() -> None:
|
|
||||||
"""Changing to a URL that is already tracked should return HTTP 409."""
|
|
||||||
second_feed_url = "https://lovinator.space/rss_test_small.xml"
|
|
||||||
|
|
||||||
# Ensure both feeds are absent.
|
|
||||||
client.post(url="/remove", data={"feed_url": feed_url})
|
|
||||||
client.post(url="/remove", data={"feed_url": second_feed_url})
|
|
||||||
|
|
||||||
# Ensure webhook exists.
|
|
||||||
client.post(url="/delete_webhook", data={"webhook_url": webhook_url})
|
|
||||||
client.post(url="/add_webhook", data={"webhook_name": webhook_name, "webhook_url": webhook_url})
|
|
||||||
|
|
||||||
# Add both feeds.
|
|
||||||
client.post(url="/add", data={"feed_url": feed_url, "webhook_dropdown": webhook_name})
|
|
||||||
client.post(url="/add", data={"feed_url": second_feed_url, "webhook_dropdown": webhook_name})
|
|
||||||
|
|
||||||
# Try to rename one to the other.
|
|
||||||
response: Response = client.post(
|
|
||||||
url="/change_feed_url",
|
|
||||||
data={"old_feed_url": feed_url, "new_feed_url": second_feed_url},
|
|
||||||
)
|
|
||||||
assert response.status_code == 409, f"Expected 409 when new URL already exists, got {response.status_code}"
|
|
||||||
|
|
||||||
# Cleanup.
|
|
||||||
client.post(url="/remove", data={"feed_url": feed_url})
|
|
||||||
client.post(url="/remove", data={"feed_url": second_feed_url})
|
|
||||||
|
|
||||||
|
|
||||||
def test_change_feed_url_same_url_redirects_without_error() -> None:
|
|
||||||
"""Changing a feed's URL to itself should redirect cleanly without any error."""
|
|
||||||
# Ensure webhook exists.
|
|
||||||
client.post(url="/delete_webhook", data={"webhook_url": webhook_url})
|
|
||||||
client.post(url="/add_webhook", data={"webhook_name": webhook_name, "webhook_url": webhook_url})
|
|
||||||
|
|
||||||
# Add the feed.
|
|
||||||
client.post(url="/remove", data={"feed_url": feed_url})
|
|
||||||
response: Response = client.post(url="/add", data={"feed_url": feed_url, "webhook_dropdown": webhook_name})
|
|
||||||
assert response.status_code == 200, f"Failed to add feed: {response.text}"
|
|
||||||
|
|
||||||
# Submit the same URL as both old and new.
|
|
||||||
response = client.post(
|
|
||||||
url="/change_feed_url",
|
|
||||||
data={"old_feed_url": feed_url, "new_feed_url": feed_url},
|
|
||||||
)
|
|
||||||
assert response.status_code == 200, f"Expected 200 redirect for same URL, got {response.status_code}"
|
|
||||||
|
|
||||||
# Feed should still be accessible.
|
|
||||||
response = client.get(url="/feed", params={"feed_url": feed_url})
|
|
||||||
assert response.status_code == 200, f"Feed should still exist after no-op URL change: {response.text}"
|
|
||||||
|
|
||||||
# Cleanup.
|
|
||||||
client.post(url="/remove", data={"feed_url": feed_url})
|
|
||||||
|
|
||||||
|
|
||||||
def test_delete_webhook() -> None:
|
def test_delete_webhook() -> None:
|
||||||
"""Test the /delete_webhook page."""
|
"""Test the /delete_webhook page."""
|
||||||
# Remove the feed if it already exists before we run the test.
|
# Remove the feed if it already exists before we run the test.
|
||||||
|
|
@ -485,110 +335,6 @@ def test_update_feed_not_found() -> None:
|
||||||
assert "Feed not found" in response.text
|
assert "Feed not found" in response.text
|
||||||
|
|
||||||
|
|
||||||
def test_post_entry_send_to_discord() -> None:
|
|
||||||
"""Test that /post_entry sends an entry to Discord and redirects to the feed page.
|
|
||||||
|
|
||||||
Regression test for the bug where the injected reader was not passed to
|
|
||||||
send_entry_to_discord, meaning the dependency-injected reader was silently ignored.
|
|
||||||
"""
|
|
||||||
# Ensure webhook and feed exist.
|
|
||||||
client.post(url="/delete_webhook", data={"webhook_url": webhook_url})
|
|
||||||
response: Response = client.post(
|
|
||||||
url="/add_webhook",
|
|
||||||
data={"webhook_name": webhook_name, "webhook_url": webhook_url},
|
|
||||||
)
|
|
||||||
assert response.status_code == 200, f"Failed to add webhook: {response.text}"
|
|
||||||
|
|
||||||
client.post(url="/remove", data={"feed_url": feed_url})
|
|
||||||
response = client.post(url="/add", data={"feed_url": feed_url, "webhook_dropdown": webhook_name})
|
|
||||||
assert response.status_code == 200, f"Failed to add feed: {response.text}"
|
|
||||||
|
|
||||||
# Retrieve an entry from the feed to get a valid entry ID.
|
|
||||||
reader: main_module.Reader = main_module.get_reader_dependency()
|
|
||||||
entries: list[Entry] = list(reader.get_entries(feed=feed_url, limit=1))
|
|
||||||
assert entries, "Feed should have at least one entry to send"
|
|
||||||
entry_to_send: main_module.Entry = entries[0]
|
|
||||||
encoded_id: str = urllib.parse.quote(entry_to_send.id)
|
|
||||||
|
|
||||||
no_redirect_client = TestClient(app, follow_redirects=False)
|
|
||||||
|
|
||||||
# Patch execute_webhook so no real HTTP requests are made to Discord.
|
|
||||||
with patch("discord_rss_bot.feeds.execute_webhook") as mock_execute:
|
|
||||||
response = no_redirect_client.get(
|
|
||||||
url="/post_entry",
|
|
||||||
params={"entry_id": encoded_id, "feed_url": urllib.parse.quote(feed_url)},
|
|
||||||
)
|
|
||||||
|
|
||||||
assert response.status_code == 303, f"Expected redirect after sending, got {response.status_code}: {response.text}"
|
|
||||||
location: str = response.headers.get("location", "")
|
|
||||||
assert "feed?feed_url=" in location, f"Should redirect to feed page, got: {location}"
|
|
||||||
assert mock_execute.called, "execute_webhook should have been called to deliver the entry to Discord"
|
|
||||||
|
|
||||||
# Cleanup.
|
|
||||||
client.post(url="/remove", data={"feed_url": feed_url})
|
|
||||||
|
|
||||||
|
|
||||||
def test_post_entry_unknown_id_returns_404() -> None:
|
|
||||||
"""Test that /post_entry returns 404 when the entry ID does not exist."""
|
|
||||||
response: Response = client.get(
|
|
||||||
url="/post_entry",
|
|
||||||
params={"entry_id": "https://nonexistent.example.com/entry-that-does-not-exist"},
|
|
||||||
)
|
|
||||||
assert response.status_code == 404, f"Expected 404 for unknown entry, got {response.status_code}"
|
|
||||||
|
|
||||||
|
|
||||||
def test_post_entry_uses_feed_url_to_disambiguate_duplicate_ids() -> None:
|
|
||||||
"""When IDs collide across feeds, /post_entry should pick the entry from provided feed_url."""
|
|
||||||
|
|
||||||
@dataclass(slots=True)
|
|
||||||
class DummyFeed:
|
|
||||||
url: str
|
|
||||||
|
|
||||||
@dataclass(slots=True)
|
|
||||||
class DummyEntry:
|
|
||||||
id: str
|
|
||||||
feed: DummyFeed
|
|
||||||
feed_url: str
|
|
||||||
|
|
||||||
feed_a = "https://example.com/feed-a.xml"
|
|
||||||
feed_b = "https://example.com/feed-b.xml"
|
|
||||||
shared_id = "https://example.com/shared-entry-id"
|
|
||||||
|
|
||||||
entry_a: Entry = cast("Entry", DummyEntry(id=shared_id, feed=DummyFeed(feed_a), feed_url=feed_a))
|
|
||||||
entry_b: Entry = cast("Entry", DummyEntry(id=shared_id, feed=DummyFeed(feed_b), feed_url=feed_b))
|
|
||||||
|
|
||||||
class StubReader:
|
|
||||||
def get_entries(self, feed: str | None = None) -> list[Entry]:
|
|
||||||
if feed == feed_a:
|
|
||||||
return [entry_a]
|
|
||||||
if feed == feed_b:
|
|
||||||
return [entry_b]
|
|
||||||
return [entry_a, entry_b]
|
|
||||||
|
|
||||||
selected_feed_urls: list[str] = []
|
|
||||||
|
|
||||||
def fake_send_entry_to_discord(entry: Entry, reader: object) -> None:
|
|
||||||
selected_feed_urls.append(entry.feed.url)
|
|
||||||
|
|
||||||
app.dependency_overrides[get_reader_dependency] = StubReader
|
|
||||||
no_redirect_client = TestClient(app, follow_redirects=False)
|
|
||||||
|
|
||||||
try:
|
|
||||||
with patch("discord_rss_bot.main.send_entry_to_discord", side_effect=fake_send_entry_to_discord):
|
|
||||||
response: Response = no_redirect_client.get(
|
|
||||||
url="/post_entry",
|
|
||||||
params={"entry_id": urllib.parse.quote(shared_id), "feed_url": urllib.parse.quote(feed_b)},
|
|
||||||
)
|
|
||||||
|
|
||||||
assert response.status_code == 303, f"Expected redirect after sending, got {response.status_code}"
|
|
||||||
assert selected_feed_urls == [feed_b], f"Expected feed-b entry, got: {selected_feed_urls}"
|
|
||||||
|
|
||||||
location = response.headers.get("location", "")
|
|
||||||
assert urllib.parse.quote(feed_b) in location, f"Expected redirect to feed-b page, got: {location}"
|
|
||||||
finally:
|
|
||||||
app.dependency_overrides = {}
|
|
||||||
|
|
||||||
|
|
||||||
def test_navbar_backup_link_hidden_when_not_configured(monkeypatch: pytest.MonkeyPatch) -> None:
|
def test_navbar_backup_link_hidden_when_not_configured(monkeypatch: pytest.MonkeyPatch) -> None:
|
||||||
"""Test that the backup link is not shown in the navbar when GIT_BACKUP_PATH is not set."""
|
"""Test that the backup link is not shown in the navbar when GIT_BACKUP_PATH is not set."""
|
||||||
# Ensure GIT_BACKUP_PATH is not set
|
# Ensure GIT_BACKUP_PATH is not set
|
||||||
|
|
@ -827,24 +573,11 @@ def test_create_html_marks_entries_from_another_feed(monkeypatch: pytest.MonkeyP
|
||||||
original_feed_url="https://example.com/feed-b.xml",
|
original_feed_url="https://example.com/feed-b.xml",
|
||||||
)
|
)
|
||||||
|
|
||||||
monkeypatch.setattr(
|
monkeypatch.setattr("discord_rss_bot.main.replace_tags_in_text_message", lambda _entry: "Rendered content")
|
||||||
"discord_rss_bot.main.replace_tags_in_text_message",
|
monkeypatch.setattr("discord_rss_bot.main.entry_is_blacklisted", lambda _entry: False)
|
||||||
lambda _entry, **_kwargs: "Rendered content",
|
monkeypatch.setattr("discord_rss_bot.main.entry_is_whitelisted", lambda _entry: False)
|
||||||
)
|
|
||||||
monkeypatch.setattr("discord_rss_bot.main.entry_is_blacklisted", lambda _entry, **_kwargs: False)
|
|
||||||
monkeypatch.setattr("discord_rss_bot.main.entry_is_whitelisted", lambda _entry, **_kwargs: False)
|
|
||||||
|
|
||||||
same_feed_entry_typed: Entry = cast("Entry", same_feed_entry)
|
html = create_html_for_feed(cast("list[Entry]", [same_feed_entry, other_feed_entry]), selected_feed_url)
|
||||||
other_feed_entry_typed: Entry = cast("Entry", other_feed_entry)
|
|
||||||
|
|
||||||
html: str = create_html_for_feed(
|
|
||||||
reader=MagicMock(),
|
|
||||||
current_feed_url=selected_feed_url,
|
|
||||||
entries=[
|
|
||||||
same_feed_entry_typed,
|
|
||||||
other_feed_entry_typed,
|
|
||||||
],
|
|
||||||
)
|
|
||||||
|
|
||||||
assert "From another feed: https://example.com/feed-b.xml" in html
|
assert "From another feed: https://example.com/feed-b.xml" in html
|
||||||
assert "From another feed: https://example.com/feed-a.xml" not in html
|
assert "From another feed: https://example.com/feed-a.xml" not in html
|
||||||
|
|
@ -887,32 +620,6 @@ def test_webhook_entries_no_feeds() -> None:
|
||||||
assert "No feeds found" in response.text or "Add feeds" in response.text, "Expected message about no feeds"
|
assert "No feeds found" in response.text or "Add feeds" in response.text, "Expected message about no feeds"
|
||||||
|
|
||||||
|
|
||||||
def test_webhook_entries_no_feeds_still_shows_webhook_settings() -> None:
|
|
||||||
"""The webhook detail view should show settings/actions even with no attached feeds."""
|
|
||||||
client.post(url="/delete_webhook", data={"webhook_url": webhook_url})
|
|
||||||
response: Response = client.post(
|
|
||||||
url="/add_webhook",
|
|
||||||
data={"webhook_name": webhook_name, "webhook_url": webhook_url},
|
|
||||||
)
|
|
||||||
assert response.status_code == 200, f"Failed to add webhook: {response.text}"
|
|
||||||
|
|
||||||
response = client.get(
|
|
||||||
url="/webhook_entries",
|
|
||||||
params={"webhook_url": webhook_url},
|
|
||||||
)
|
|
||||||
|
|
||||||
assert response.status_code == 200, f"Failed to get /webhook_entries: {response.text}"
|
|
||||||
assert "Settings" in response.text, "Expected settings card on webhook detail view"
|
|
||||||
assert "Modify Webhook" in response.text, "Expected modify form on webhook detail view"
|
|
||||||
assert "Delete Webhook" in response.text, "Expected delete action on webhook detail view"
|
|
||||||
assert "Back to dashboard" in response.text, "Expected dashboard navigation link"
|
|
||||||
assert "All webhooks" in response.text, "Expected all webhooks navigation link"
|
|
||||||
assert f'name="old_hook" value="{webhook_url}"' in response.text, "Expected old_hook hidden input"
|
|
||||||
assert f'value="/webhook_entries?webhook_url={urllib.parse.quote(webhook_url)}"' in response.text, (
|
|
||||||
"Expected modify form to redirect back to the current webhook detail view"
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def test_webhook_entries_with_feeds_no_entries() -> None:
|
def test_webhook_entries_with_feeds_no_entries() -> None:
|
||||||
"""Test webhook_entries endpoint when webhook has feeds but no entries yet."""
|
"""Test webhook_entries endpoint when webhook has feeds but no entries yet."""
|
||||||
# Clean up and create fresh webhook
|
# Clean up and create fresh webhook
|
||||||
|
|
@ -974,38 +681,6 @@ def test_webhook_entries_with_entries() -> None:
|
||||||
assert webhook_name in response.text, "Webhook name not found in response"
|
assert webhook_name in response.text, "Webhook name not found in response"
|
||||||
# Should show entries (the feed has entries)
|
# Should show entries (the feed has entries)
|
||||||
assert "total from" in response.text, "Expected to see entry count"
|
assert "total from" in response.text, "Expected to see entry count"
|
||||||
assert "Modify Webhook" in response.text, "Expected webhook settings to be visible"
|
|
||||||
assert "Attached feeds" in response.text, "Expected attached feeds section to be visible"
|
|
||||||
|
|
||||||
|
|
||||||
def test_webhook_entries_shows_attached_feed_link() -> None:
|
|
||||||
"""The webhook detail view should list attached feeds linking to their feed pages."""
|
|
||||||
client.post(url="/delete_webhook", data={"webhook_url": webhook_url})
|
|
||||||
response: Response = client.post(
|
|
||||||
url="/add_webhook",
|
|
||||||
data={"webhook_name": webhook_name, "webhook_url": webhook_url},
|
|
||||||
)
|
|
||||||
assert response.status_code == 200, f"Failed to add webhook: {response.text}"
|
|
||||||
|
|
||||||
client.post(url="/remove", data={"feed_url": feed_url})
|
|
||||||
response = client.post(
|
|
||||||
url="/add",
|
|
||||||
data={"feed_url": feed_url, "webhook_dropdown": webhook_name},
|
|
||||||
)
|
|
||||||
assert response.status_code == 200, f"Failed to add feed: {response.text}"
|
|
||||||
|
|
||||||
response = client.get(
|
|
||||||
url="/webhook_entries",
|
|
||||||
params={"webhook_url": webhook_url},
|
|
||||||
)
|
|
||||||
|
|
||||||
assert response.status_code == 200, f"Failed to get /webhook_entries: {response.text}"
|
|
||||||
assert f"/feed?feed_url={urllib.parse.quote(feed_url)}" in response.text, (
|
|
||||||
"Expected attached feed to link to its feed detail page"
|
|
||||||
)
|
|
||||||
assert "Latest entries" in response.text, "Expected latest entries heading on webhook detail view"
|
|
||||||
|
|
||||||
client.post(url="/remove", data={"feed_url": feed_url})
|
|
||||||
|
|
||||||
|
|
||||||
def test_webhook_entries_multiple_feeds() -> None:
|
def test_webhook_entries_multiple_feeds() -> None:
|
||||||
|
|
@ -1041,75 +716,6 @@ def test_webhook_entries_multiple_feeds() -> None:
|
||||||
client.post(url="/remove", data={"feed_url": feed_url})
|
client.post(url="/remove", data={"feed_url": feed_url})
|
||||||
|
|
||||||
|
|
||||||
def test_webhook_entries_sort_newest_and_non_null_published_first() -> None:
|
|
||||||
"""Webhook entries should be sorted newest-first with published=None entries placed last."""
|
|
||||||
|
|
||||||
@dataclass(slots=True)
|
|
||||||
class DummyFeed:
|
|
||||||
url: str
|
|
||||||
title: str | None = None
|
|
||||||
updates_enabled: bool = True
|
|
||||||
last_exception: None = None
|
|
||||||
|
|
||||||
@dataclass(slots=True)
|
|
||||||
class DummyEntry:
|
|
||||||
id: str
|
|
||||||
feed: DummyFeed
|
|
||||||
published: datetime | None
|
|
||||||
|
|
||||||
dummy_feed = DummyFeed(url="https://example.com/feed.xml", title="Example Feed")
|
|
||||||
|
|
||||||
# Intentionally unsorted input with two dated entries and two undated entries.
|
|
||||||
unsorted_entries: list[Entry] = [
|
|
||||||
cast("Entry", DummyEntry(id="old", feed=dummy_feed, published=datetime(2024, 1, 1, tzinfo=UTC))),
|
|
||||||
cast("Entry", DummyEntry(id="none-1", feed=dummy_feed, published=None)),
|
|
||||||
cast("Entry", DummyEntry(id="new", feed=dummy_feed, published=datetime(2024, 2, 1, tzinfo=UTC))),
|
|
||||||
cast("Entry", DummyEntry(id="none-2", feed=dummy_feed, published=None)),
|
|
||||||
]
|
|
||||||
|
|
||||||
class StubReader:
|
|
||||||
def get_tag(self, resource: object, key: str, default: object = None) -> object:
|
|
||||||
if resource == () and key == "webhooks":
|
|
||||||
return [{"name": webhook_name, "url": webhook_url}]
|
|
||||||
if key == "webhook" and isinstance(resource, str):
|
|
||||||
return webhook_url
|
|
||||||
return default
|
|
||||||
|
|
||||||
def get_feeds(self) -> list[DummyFeed]:
|
|
||||||
return [dummy_feed]
|
|
||||||
|
|
||||||
def get_entries(self, **_kwargs: object) -> list[Entry]:
|
|
||||||
return unsorted_entries
|
|
||||||
|
|
||||||
observed_order: list[str] = []
|
|
||||||
|
|
||||||
def capture_entries(*, reader: object, entries: list[Entry], current_feed_url: str = "") -> str:
|
|
||||||
del reader, current_feed_url
|
|
||||||
observed_order.extend(entry.id for entry in entries)
|
|
||||||
return ""
|
|
||||||
|
|
||||||
app.dependency_overrides[get_reader_dependency] = StubReader
|
|
||||||
try:
|
|
||||||
with (
|
|
||||||
patch(
|
|
||||||
"discord_rss_bot.main.get_data_from_hook_url",
|
|
||||||
return_value=main_module.WebhookInfo(custom_name=webhook_name, url=webhook_url),
|
|
||||||
),
|
|
||||||
patch("discord_rss_bot.main.create_html_for_feed", side_effect=capture_entries),
|
|
||||||
):
|
|
||||||
response: Response = client.get(
|
|
||||||
url="/webhook_entries",
|
|
||||||
params={"webhook_url": webhook_url},
|
|
||||||
)
|
|
||||||
|
|
||||||
assert response.status_code == 200, f"Failed to get /webhook_entries: {response.text}"
|
|
||||||
assert observed_order == ["new", "old", "none-1", "none-2"], (
|
|
||||||
"Expected newest published entries first and published=None entries last"
|
|
||||||
)
|
|
||||||
finally:
|
|
||||||
app.dependency_overrides = {}
|
|
||||||
|
|
||||||
|
|
||||||
def test_webhook_entries_pagination() -> None:
|
def test_webhook_entries_pagination() -> None:
|
||||||
"""Test webhook_entries endpoint pagination functionality."""
|
"""Test webhook_entries endpoint pagination functionality."""
|
||||||
# Clean up and create webhook
|
# Clean up and create webhook
|
||||||
|
|
@ -1177,445 +783,3 @@ def test_webhook_entries_url_encoding() -> None:
|
||||||
|
|
||||||
# Clean up
|
# Clean up
|
||||||
client.post(url="/remove", data={"feed_url": feed_url})
|
client.post(url="/remove", data={"feed_url": feed_url})
|
||||||
|
|
||||||
|
|
||||||
def test_dashboard_webhook_name_links_to_webhook_detail() -> None:
|
|
||||||
"""Webhook names on the dashboard should open the webhook detail view."""
|
|
||||||
client.post(url="/delete_webhook", data={"webhook_url": webhook_url})
|
|
||||||
response: Response = client.post(
|
|
||||||
url="/add_webhook",
|
|
||||||
data={"webhook_name": webhook_name, "webhook_url": webhook_url},
|
|
||||||
)
|
|
||||||
assert response.status_code == 200, f"Failed to add webhook: {response.text}"
|
|
||||||
|
|
||||||
client.post(url="/remove", data={"feed_url": feed_url})
|
|
||||||
response = client.post(url="/add", data={"feed_url": feed_url, "webhook_dropdown": webhook_name})
|
|
||||||
assert response.status_code == 200, f"Failed to add feed: {response.text}"
|
|
||||||
|
|
||||||
response = client.get(url="/")
|
|
||||||
assert response.status_code == 200, f"Failed to get /: {response.text}"
|
|
||||||
|
|
||||||
expected_link = f"/webhook_entries?webhook_url={urllib.parse.quote(webhook_url)}"
|
|
||||||
assert expected_link in response.text, "Expected dashboard webhook link to point to the webhook detail view"
|
|
||||||
|
|
||||||
client.post(url="/remove", data={"feed_url": feed_url})
|
|
||||||
|
|
||||||
|
|
||||||
def test_modify_webhook_redirects_back_to_webhook_detail() -> None:
|
|
||||||
"""Webhook updates from the detail view should redirect back to that view with the new URL."""
|
|
||||||
original_webhook_url = "https://discord.com/api/webhooks/1234567890/abcdefghijklmnopqrstuvwxyz"
|
|
||||||
new_webhook_url = "https://discord.com/api/webhooks/1234567890/updated-token"
|
|
||||||
|
|
||||||
client.post(url="/delete_webhook", data={"webhook_url": original_webhook_url})
|
|
||||||
client.post(url="/delete_webhook", data={"webhook_url": new_webhook_url})
|
|
||||||
|
|
||||||
|
|
||||||
def test_modify_webhook_triggers_git_backup_commit() -> None:
|
|
||||||
"""Modifying a webhook URL should record a state change for git backup."""
|
|
||||||
original_webhook_url = "https://discord.com/api/webhooks/1234567890/abcdefghijklmnopqrstuvwxyz"
|
|
||||||
new_webhook_url = "https://discord.com/api/webhooks/1234567890/updated-token"
|
|
||||||
|
|
||||||
client.post(url="/delete_webhook", data={"webhook_url": original_webhook_url})
|
|
||||||
client.post(url="/delete_webhook", data={"webhook_url": new_webhook_url})
|
|
||||||
|
|
||||||
response: Response = client.post(
|
|
||||||
url="/add_webhook",
|
|
||||||
data={"webhook_name": webhook_name, "webhook_url": original_webhook_url},
|
|
||||||
)
|
|
||||||
assert response.status_code == 200, f"Failed to add webhook: {response.text}"
|
|
||||||
|
|
||||||
no_redirect_client = TestClient(app, follow_redirects=False)
|
|
||||||
with patch("discord_rss_bot.main.commit_state_change") as mock_commit_state_change:
|
|
||||||
response = no_redirect_client.post(
|
|
||||||
url="/modify_webhook",
|
|
||||||
data={
|
|
||||||
"old_hook": original_webhook_url,
|
|
||||||
"new_hook": new_webhook_url,
|
|
||||||
"redirect_to": f"/webhook_entries?webhook_url={urllib.parse.quote(original_webhook_url)}",
|
|
||||||
},
|
|
||||||
)
|
|
||||||
|
|
||||||
assert response.status_code == 303, f"Expected 303 redirect, got {response.status_code}: {response.text}"
|
|
||||||
assert mock_commit_state_change.call_count == 1, "Expected webhook modification to trigger git backup commit"
|
|
||||||
|
|
||||||
client.post(url="/delete_webhook", data={"webhook_url": new_webhook_url})
|
|
||||||
|
|
||||||
response = client.post(
|
|
||||||
url="/add_webhook",
|
|
||||||
data={"webhook_name": webhook_name, "webhook_url": original_webhook_url},
|
|
||||||
)
|
|
||||||
assert response.status_code == 200, f"Failed to add webhook: {response.text}"
|
|
||||||
|
|
||||||
no_redirect_client = TestClient(app, follow_redirects=False)
|
|
||||||
response = no_redirect_client.post(
|
|
||||||
url="/modify_webhook",
|
|
||||||
data={
|
|
||||||
"old_hook": original_webhook_url,
|
|
||||||
"new_hook": new_webhook_url,
|
|
||||||
"redirect_to": f"/webhook_entries?webhook_url={urllib.parse.quote(original_webhook_url)}",
|
|
||||||
},
|
|
||||||
)
|
|
||||||
|
|
||||||
assert response.status_code == 303, f"Expected 303 redirect, got {response.status_code}: {response.text}"
|
|
||||||
assert response.headers["location"] == (f"/webhook_entries?webhook_url={urllib.parse.quote(new_webhook_url)}"), (
|
|
||||||
f"Unexpected redirect location: {response.headers['location']}"
|
|
||||||
)
|
|
||||||
|
|
||||||
client.post(url="/delete_webhook", data={"webhook_url": new_webhook_url})
|
|
||||||
|
|
||||||
|
|
||||||
def test_webhook_entries_mass_update_preview_shows_old_and_new_urls() -> None:
    """Preview should list old->new feed URLs for webhook bulk replacement.

    Uses a stub Reader (injected via FastAPI dependency overrides) holding two
    feeds on the old domain and one unrelated feed, then asserts the preview
    page reports exactly the two matching feeds as "Will update".
    """

    @dataclass(slots=True)
    class DummyFeed:
        # Minimal stand-in for reader.Feed with the attributes the preview reads.
        url: str
        title: str | None = None
        updates_enabled: bool = True
        last_exception: None = None

    class StubReader:
        """Fake Reader exposing only the methods the preview endpoint calls."""

        def __init__(self) -> None:
            self._feeds: list[DummyFeed] = [
                DummyFeed(url="https://old.example.com/rss/a.xml", title="A"),
                DummyFeed(url="https://old.example.com/rss/b.xml", title="B"),
                DummyFeed(url="https://unchanged.example.com/rss/c.xml", title="C"),
            ]

        def get_tag(self, resource: object, key: str, default: object = None) -> object:
            # Global tag lookup: the list of configured webhooks.
            if resource == () and key == "webhooks":
                return [{"name": webhook_name, "url": webhook_url}]
            # Per-feed tag lookup: every stub feed belongs to the test webhook.
            if key == "webhook" and isinstance(resource, str):
                if resource.startswith("https://old.example.com"):
                    return webhook_url
                if resource.startswith("https://unchanged.example.com"):
                    return webhook_url
            return default

        def get_feeds(self) -> list[DummyFeed]:
            return self._feeds

        def get_entries(self, **_kwargs: object) -> list[Entry]:
            return []

    app.dependency_overrides[get_reader_dependency] = StubReader
    try:
        with (
            patch(
                "discord_rss_bot.main.get_data_from_hook_url",
                return_value=main_module.WebhookInfo(custom_name=webhook_name, url=webhook_url),
            ),
            # Pretend every old.example.com URL resolves to new.example.com.
            patch(
                "discord_rss_bot.main.resolve_final_feed_url",
                side_effect=lambda url: (url.replace("old.example.com", "new.example.com"), None),
            ),
        ):
            response: Response = client.get(
                url="/webhook_entries",
                params={
                    "webhook_url": webhook_url,
                    "replace_from": "old.example.com",
                    "replace_to": "new.example.com",
                    "resolve_urls": "true",
                },
            )

        assert response.status_code == 200, f"Failed to get preview: {response.text}"
        assert "Mass update feed URLs" in response.text
        assert "old.example.com/rss/a.xml" in response.text
        assert "new.example.com/rss/a.xml" in response.text
        assert "Will update" in response.text
        assert "Matched: 2" in response.text
        assert "Will update: 2" in response.text
    finally:
        # Remove only our override instead of replacing the whole dict, so
        # overrides installed elsewhere (e.g. by fixtures) are not clobbered.
        app.dependency_overrides.pop(get_reader_dependency, None)
|
|
||||||
def test_bulk_change_feed_urls_updates_matching_feeds() -> None:
    """Mass updater should change all matching feed URLs for a webhook.

    Posts the bulk-change form against a stub Reader and verifies that only
    the two feeds on the old domain are rewritten and refreshed, and that the
    redirect location carries the "Updated 2 feed URL(s)" message.
    """

    @dataclass(slots=True)
    class DummyFeed:
        url: str

    class StubReader:
        """Fake Reader that records change/update calls for later assertions."""

        def __init__(self) -> None:
            self._feeds = [
                DummyFeed(url="https://old.example.com/rss/a.xml"),
                DummyFeed(url="https://old.example.com/rss/b.xml"),
                DummyFeed(url="https://unchanged.example.com/rss/c.xml"),
            ]
            self.change_calls: list[tuple[str, str]] = []
            self.updated_feeds: list[str] = []

        def get_tag(self, resource: object, key: str, default: object = None) -> object:
            if resource == () and key == "webhooks":
                return [{"name": webhook_name, "url": webhook_url}]
            if key == "webhook" and isinstance(resource, str):
                return webhook_url
            return default

        def get_feeds(self) -> list[DummyFeed]:
            return self._feeds

        def change_feed_url(self, old_url: str, new_url: str) -> None:
            self.change_calls.append((old_url, new_url))

        def update_feed(self, feed_url: str) -> None:
            self.updated_feeds.append(feed_url)

        def get_entries(self, **_kwargs: object) -> list[Entry]:
            return []

        def set_entry_read(self, _entry: Entry, _value: bool) -> None:  # noqa: FBT001
            return

    stub_reader = StubReader()
    app.dependency_overrides[get_reader_dependency] = lambda: stub_reader
    # Disable redirect following so the 303 response itself can be inspected.
    no_redirect_client = TestClient(app, follow_redirects=False)

    try:
        with patch(
            "discord_rss_bot.main.resolve_final_feed_url",
            side_effect=lambda url: (url.replace("old.example.com", "new.example.com"), None),
        ):
            response: Response = no_redirect_client.post(
                url="/bulk_change_feed_urls",
                data={
                    "webhook_url": webhook_url,
                    "replace_from": "old.example.com",
                    "replace_to": "new.example.com",
                    "resolve_urls": "true",
                },
            )

        assert response.status_code == 303, f"Expected redirect, got {response.status_code}: {response.text}"
        assert "Updated%202%20feed%20URL%28s%29" in response.headers.get("location", "")
        assert sorted(stub_reader.change_calls) == sorted([
            ("https://old.example.com/rss/a.xml", "https://new.example.com/rss/a.xml"),
            ("https://old.example.com/rss/b.xml", "https://new.example.com/rss/b.xml"),
        ])
        assert sorted(stub_reader.updated_feeds) == sorted([
            "https://new.example.com/rss/a.xml",
            "https://new.example.com/rss/b.xml",
        ])
    finally:
        # Remove only our override instead of replacing the whole dict, so
        # overrides installed elsewhere (e.g. by fixtures) are not clobbered.
        app.dependency_overrides.pop(get_reader_dependency, None)
|
|
||||||
|
|
||||||
|
|
||||||
def test_webhook_entries_mass_update_preview_fragment_endpoint() -> None:
    """HTMX preview endpoint should render only the mass-update preview fragment.

    The fragment endpoint must return the preview table without the full-page
    wrapper text, so it can be swapped into the page by HTMX.
    """

    @dataclass(slots=True)
    class DummyFeed:
        # Minimal stand-in for reader.Feed with the attributes the fragment reads.
        url: str
        title: str | None = None
        updates_enabled: bool = True
        last_exception: None = None

    class StubReader:
        """Fake Reader exposing only the methods the fragment endpoint calls."""

        def __init__(self) -> None:
            self._feeds: list[DummyFeed] = [
                DummyFeed(url="https://old.example.com/rss/a.xml", title="A"),
                DummyFeed(url="https://old.example.com/rss/b.xml", title="B"),
            ]

        def get_tag(self, resource: object, key: str, default: object = None) -> object:
            if key == "webhook" and isinstance(resource, str):
                return webhook_url
            return default

        def get_feeds(self) -> list[DummyFeed]:
            return self._feeds

    app.dependency_overrides[get_reader_dependency] = StubReader
    try:
        with patch(
            "discord_rss_bot.main.resolve_final_feed_url",
            side_effect=lambda url: (url.replace("old.example.com", "new.example.com"), None),
        ):
            response: Response = client.get(
                url="/webhook_entries_mass_update_preview",
                params={
                    "webhook_url": webhook_url,
                    "replace_from": "old.example.com",
                    "replace_to": "new.example.com",
                    "resolve_urls": "true",
                },
            )

        assert response.status_code == 200, f"Failed to get HTMX preview fragment: {response.text}"
        assert "Will update: 2" in response.text
        assert "<table" in response.text
        assert "Mass update feed URLs" not in response.text, "Fragment should not include full page wrapper text"
    finally:
        # Remove only our override instead of replacing the whole dict, so
        # overrides installed elsewhere (e.g. by fixtures) are not clobbered.
        app.dependency_overrides.pop(get_reader_dependency, None)
|
|
||||||
|
|
||||||
|
|
||||||
def test_bulk_change_feed_urls_force_update_overwrites_conflict() -> None:  # noqa: C901
    """Force update should overwrite conflicting target URLs instead of skipping them.

    The stub holds both the old URL and its already-existing target; with
    force_update the endpoint must delete the conflicting target feed first,
    then rename the old feed onto it.
    """

    @dataclass(slots=True)
    class DummyFeed:
        url: str

    class StubReader:
        """Fake Reader that records delete/change calls for later assertions."""

        def __init__(self) -> None:
            self._feeds = [
                DummyFeed(url="https://old.example.com/rss/a.xml"),
                # Pre-existing feed at the rename target -> conflict.
                DummyFeed(url="https://new.example.com/rss/a.xml"),
            ]
            self.delete_calls: list[str] = []
            self.change_calls: list[tuple[str, str]] = []

        def get_tag(self, resource: object, key: str, default: object = None) -> object:
            if resource == () and key == "webhooks":
                return [{"name": webhook_name, "url": webhook_url}]
            if key == "webhook" and isinstance(resource, str):
                return webhook_url
            return default

        def get_feeds(self) -> list[DummyFeed]:
            return self._feeds

        def delete_feed(self, feed_url: str) -> None:
            self.delete_calls.append(feed_url)

        def change_feed_url(self, old_url: str, new_url: str) -> None:
            self.change_calls.append((old_url, new_url))

        def update_feed(self, _feed_url: str) -> None:
            return

        def get_entries(self, **_kwargs: object) -> list[Entry]:
            return []

        def set_entry_read(self, _entry: Entry, _value: bool) -> None:  # noqa: FBT001
            return

    stub_reader = StubReader()
    app.dependency_overrides[get_reader_dependency] = lambda: stub_reader
    # Disable redirect following so the 303 response itself can be inspected.
    no_redirect_client = TestClient(app, follow_redirects=False)

    try:
        with patch(
            "discord_rss_bot.main.resolve_final_feed_url",
            side_effect=lambda url: (url.replace("old.example.com", "new.example.com"), None),
        ):
            response: Response = no_redirect_client.post(
                url="/bulk_change_feed_urls",
                data={
                    "webhook_url": webhook_url,
                    "replace_from": "old.example.com",
                    "replace_to": "new.example.com",
                    "resolve_urls": "true",
                    "force_update": "true",
                },
            )

        assert response.status_code == 303, f"Expected redirect, got {response.status_code}: {response.text}"
        # Conflict target must be deleted before the rename happens.
        assert stub_reader.delete_calls == ["https://new.example.com/rss/a.xml"]
        assert stub_reader.change_calls == [
            (
                "https://old.example.com/rss/a.xml",
                "https://new.example.com/rss/a.xml",
            ),
        ]
        assert "Force%20overwrote%201" in response.headers.get("location", "")
    finally:
        # Remove only our override instead of replacing the whole dict, so
        # overrides installed elsewhere (e.g. by fixtures) are not clobbered.
        app.dependency_overrides.pop(get_reader_dependency, None)
|
|
||||||
|
|
||||||
|
|
||||||
def test_bulk_change_feed_urls_force_update_ignores_resolution_error() -> None:
    """Force update should proceed even when URL resolution returns an error (e.g. HTTP 404).

    resolve_final_feed_url is patched to report an error alongside the
    resolved URL; with force_update the rename must still be applied and the
    redirect message must report one update and zero failures.
    """

    @dataclass(slots=True)
    class DummyFeed:
        url: str

    class StubReader:
        """Fake Reader that records change calls for later assertions."""

        def __init__(self) -> None:
            self._feeds = [
                DummyFeed(url="https://old.example.com/rss/a.xml"),
            ]
            self.change_calls: list[tuple[str, str]] = []

        def get_tag(self, resource: object, key: str, default: object = None) -> object:
            if resource == () and key == "webhooks":
                return [{"name": webhook_name, "url": webhook_url}]
            if key == "webhook" and isinstance(resource, str):
                return webhook_url
            return default

        def get_feeds(self) -> list[DummyFeed]:
            return self._feeds

        def change_feed_url(self, old_url: str, new_url: str) -> None:
            self.change_calls.append((old_url, new_url))

        def update_feed(self, _feed_url: str) -> None:
            return

        def get_entries(self, **_kwargs: object) -> list[Entry]:
            return []

        def set_entry_read(self, _entry: Entry, _value: bool) -> None:  # noqa: FBT001
            return

    stub_reader = StubReader()
    app.dependency_overrides[get_reader_dependency] = lambda: stub_reader
    # Disable redirect following so the 303 response itself can be inspected.
    no_redirect_client = TestClient(app, follow_redirects=False)

    try:
        # Resolution reports an error ("HTTP 404") together with the new URL.
        with patch(
            "discord_rss_bot.main.resolve_final_feed_url",
            return_value=("https://new.example.com/rss/a.xml", "HTTP 404"),
        ):
            response: Response = no_redirect_client.post(
                url="/bulk_change_feed_urls",
                data={
                    "webhook_url": webhook_url,
                    "replace_from": "old.example.com",
                    "replace_to": "new.example.com",
                    "resolve_urls": "true",
                    "force_update": "true",
                },
            )

        assert response.status_code == 303, f"Expected redirect, got {response.status_code}: {response.text}"
        assert stub_reader.change_calls == [
            (
                "https://old.example.com/rss/a.xml",
                "https://new.example.com/rss/a.xml",
            ),
        ]
        location = response.headers.get("location", "")
        assert "Updated%201%20feed%20URL%28s%29" in location
        assert "Failed%200" in location
    finally:
        # Remove only our override instead of replacing the whole dict, so
        # overrides installed elsewhere (e.g. by fixtures) are not clobbered.
        app.dependency_overrides.pop(get_reader_dependency, None)
|
|
||||||
|
|
||||||
|
|
||||||
def test_reader_dependency_override_is_used() -> None:
    """Reader should be injectable and overridable via FastAPI dependency overrides."""

    class StubReader:
        def get_tag(self, _resource: str, _key: str, default: str | None = None) -> str | None:
            """Stub get_tag that always returns the default value.

            Args:
                _resource: Ignored.
                _key: Ignored.
                default: The value to return.

            Returns:
                The default value, simulating a missing tag.
            """
            return default

    app.dependency_overrides[get_reader_dependency] = StubReader
    try:
        response: Response = client.get(url="/add")
        assert response.status_code == 200, f"Expected /add to render with overridden reader: {response.text}"
    finally:
        # Remove only our override instead of replacing the whole dict, so
        # overrides installed elsewhere (e.g. by fixtures) are not clobbered.
        app.dependency_overrides.pop(get_reader_dependency, None)
|
|
||||||
|
|
|
||||||
|
|
@ -46,7 +46,7 @@ def test_create_search_context() -> None:
|
||||||
reader.update_search()
|
reader.update_search()
|
||||||
|
|
||||||
# Create the search context.
|
# Create the search context.
|
||||||
context: dict = create_search_context("test", reader=reader)
|
context: dict = create_search_context("test", custom_reader=reader)
|
||||||
assert context is not None, f"The context should not be None. Got: {context}"
|
assert context is not None, f"The context should not be None. Got: {context}"
|
||||||
|
|
||||||
# Close the reader, so we can delete the directory.
|
# Close the reader, so we can delete the directory.
|
||||||
|
|
|
||||||
|
|
@ -22,12 +22,12 @@ def test_reader() -> None:
|
||||||
Path.mkdir(Path(temp_dir), exist_ok=True)
|
Path.mkdir(Path(temp_dir), exist_ok=True)
|
||||||
|
|
||||||
custom_loc: pathlib.Path = pathlib.Path(temp_dir, "custom_loc_db.sqlite")
|
custom_loc: pathlib.Path = pathlib.Path(temp_dir, "custom_loc_db.sqlite")
|
||||||
reader: Reader = get_reader(custom_location=str(custom_loc))
|
custom_reader: Reader = get_reader(custom_location=str(custom_loc))
|
||||||
assert_msg = f"The custom reader should be an instance of Reader. But it was '{type(reader)}'."
|
assert_msg = f"The custom reader should be an instance of Reader. But it was '{type(custom_reader)}'."
|
||||||
assert isinstance(reader, Reader), assert_msg
|
assert isinstance(custom_reader, Reader), assert_msg
|
||||||
|
|
||||||
# Close the reader, so we can delete the directory.
|
# Close the reader, so we can delete the directory.
|
||||||
reader.close()
|
custom_reader.close()
|
||||||
|
|
||||||
|
|
||||||
def test_data_dir() -> None:
|
def test_data_dir() -> None:
|
||||||
|
|
@ -49,16 +49,16 @@ def test_get_webhook_for_entry() -> None:
|
||||||
Path.mkdir(Path(temp_dir), exist_ok=True)
|
Path.mkdir(Path(temp_dir), exist_ok=True)
|
||||||
|
|
||||||
custom_loc: pathlib.Path = pathlib.Path(temp_dir, "custom_loc_db.sqlite")
|
custom_loc: pathlib.Path = pathlib.Path(temp_dir, "custom_loc_db.sqlite")
|
||||||
reader: Reader = get_reader(custom_location=str(custom_loc))
|
custom_reader: Reader = get_reader(custom_location=str(custom_loc))
|
||||||
|
|
||||||
# Add a feed to the database.
|
# Add a feed to the database.
|
||||||
reader.add_feed("https://www.reddit.com/r/movies.rss")
|
custom_reader.add_feed("https://www.reddit.com/r/movies.rss")
|
||||||
reader.update_feed("https://www.reddit.com/r/movies.rss")
|
custom_reader.update_feed("https://www.reddit.com/r/movies.rss")
|
||||||
|
|
||||||
# Add a webhook to the database.
|
# Add a webhook to the database.
|
||||||
reader.set_tag("https://www.reddit.com/r/movies.rss", "webhook", "https://example.com") # pyright: ignore[reportArgumentType]
|
custom_reader.set_tag("https://www.reddit.com/r/movies.rss", "webhook", "https://example.com") # pyright: ignore[reportArgumentType]
|
||||||
our_tag = reader.get_tag("https://www.reddit.com/r/movies.rss", "webhook") # pyright: ignore[reportArgumentType]
|
our_tag = custom_reader.get_tag("https://www.reddit.com/r/movies.rss", "webhook") # pyright: ignore[reportArgumentType]
|
||||||
assert our_tag == "https://example.com", f"The tag should be 'https://example.com'. But it was '{our_tag}'."
|
assert our_tag == "https://example.com", f"The tag should be 'https://example.com'. But it was '{our_tag}'."
|
||||||
|
|
||||||
# Close the reader, so we can delete the directory.
|
# Close the reader, so we can delete the directory.
|
||||||
reader.close()
|
custom_reader.close()
|
||||||
|
|
|
||||||
|
|
@ -37,7 +37,7 @@ def test_has_white_tags() -> None:
|
||||||
reader.update_feeds()
|
reader.update_feeds()
|
||||||
|
|
||||||
# Test feed without any whitelist tags
|
# Test feed without any whitelist tags
|
||||||
assert has_white_tags(reader=get_reader(), feed=feed) is False, "Feed should not have any whitelist tags"
|
assert has_white_tags(custom_reader=get_reader(), feed=feed) is False, "Feed should not have any whitelist tags"
|
||||||
|
|
||||||
check_if_has_tag(reader, feed, "whitelist_title")
|
check_if_has_tag(reader, feed, "whitelist_title")
|
||||||
check_if_has_tag(reader, feed, "whitelist_summary")
|
check_if_has_tag(reader, feed, "whitelist_summary")
|
||||||
|
|
@ -56,9 +56,9 @@ def test_has_white_tags() -> None:
|
||||||
|
|
||||||
def check_if_has_tag(reader: Reader, feed: Feed, whitelist_name: str) -> None:
|
def check_if_has_tag(reader: Reader, feed: Feed, whitelist_name: str) -> None:
|
||||||
reader.set_tag(feed, whitelist_name, "a") # pyright: ignore[reportArgumentType]
|
reader.set_tag(feed, whitelist_name, "a") # pyright: ignore[reportArgumentType]
|
||||||
assert has_white_tags(reader=reader, feed=feed) is True, "Feed should have whitelist tags"
|
assert has_white_tags(custom_reader=reader, feed=feed) is True, "Feed should have whitelist tags"
|
||||||
reader.delete_tag(feed, whitelist_name)
|
reader.delete_tag(feed, whitelist_name)
|
||||||
assert has_white_tags(reader=reader, feed=feed) is False, "Feed should not have any whitelist tags"
|
assert has_white_tags(custom_reader=reader, feed=feed) is False, "Feed should not have any whitelist tags"
|
||||||
|
|
||||||
|
|
||||||
def test_should_be_sent() -> None:
|
def test_should_be_sent() -> None:
|
||||||
|
|
|
||||||
Loading…
Add table
Add a link
Reference in a new issue