diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index b30555f..3e5f892 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -9,7 +9,7 @@ repos: # Some out-of-the-box hooks for pre-commit. - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.6.0 + rev: v5.0.0 hooks: - id: check-added-large-files - id: check-ast @@ -33,14 +33,14 @@ repos: # Run Pyupgrade on all Python files. This will upgrade the code to Python 3.12. - repo: https://github.com/asottile/pyupgrade - rev: v3.17.0 + rev: v3.19.0 hooks: - id: pyupgrade args: ["--py312-plus"] # An extremely fast Python linter and formatter. - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.5.7 + rev: v0.7.1 hooks: - id: ruff-format - id: ruff @@ -48,6 +48,6 @@ repos: # Static checker for GitHub Actions workflow files. - repo: https://github.com/rhysd/actionlint - rev: v1.7.1 + rev: v1.7.3 hooks: - id: actionlint diff --git a/README.md b/README.md index 7c5cb74..849fb98 100644 --- a/README.md +++ b/README.md @@ -55,3 +55,8 @@ This is not recommended if you don't have an init system (e.g., systemd) - You can stop the bot with Ctrl + c. Note: You will need to run `poetry install` again if [poetry.lock](poetry.lock) has been modified. 
+ +## Contact + +Email: [tlovinator@gmail.com](mailto:tlovinator@gmail.com) +Discord: TheLovinator#9276 diff --git a/discord_rss_bot/custom_filters.py b/discord_rss_bot/custom_filters.py index 7b1f19c..090aef2 100644 --- a/discord_rss_bot/custom_filters.py +++ b/discord_rss_bot/custom_filters.py @@ -3,7 +3,7 @@ from functools import lru_cache from reader import Entry, Reader -from discord_rss_bot.filter.blacklist import has_black_tags, should_be_skipped +from discord_rss_bot.filter.blacklist import entry_should_be_skipped, feed_has_blacklist_tags from discord_rss_bot.filter.whitelist import has_white_tags, should_be_sent from discord_rss_bot.settings import get_reader @@ -24,7 +24,7 @@ def encode_url(url_to_quote: str) -> str: Returns: The encoded url. """ - return urllib.parse.quote(url_to_quote) if url_to_quote else "" + return urllib.parse.quote(string=url_to_quote) if url_to_quote else "" def entry_is_whitelisted(entry_to_check: Entry) -> bool: @@ -50,4 +50,6 @@ def entry_is_blacklisted(entry_to_check: Entry) -> bool: bool: True if the feed is blacklisted, False otherwise. 
""" - return bool(has_black_tags(reader, entry_to_check.feed) and should_be_skipped(reader, entry_to_check)) + return bool( + feed_has_blacklist_tags(reader, entry_to_check.feed) and entry_should_be_skipped(reader, entry_to_check), + ) diff --git a/discord_rss_bot/custom_message.py b/discord_rss_bot/custom_message.py index bf04daf..e63d059 100644 --- a/discord_rss_bot/custom_message.py +++ b/discord_rss_bot/custom_message.py @@ -42,6 +42,7 @@ def try_to_replace(custom_message: str, template: str, replace_with: str) -> str try: return custom_message.replace(template, replace_with) except (TypeError, AttributeError, ValueError): + logger.exception("Failed to replace %s with %s in %s", template, replace_with, custom_message) return custom_message @@ -78,39 +79,52 @@ def replace_tags_in_text_message(entry: Entry) -> str: summary = summary.replace("[https://", "[") summary = summary.replace("[https://www.", "[") - list_of_replacements = [ - {"{{feed_author}}": feed.author}, - {"{{feed_added}}": feed.added}, - {"{{feed_last_exception}}": feed.last_exception}, - {"{{feed_last_updated}}": feed.last_updated}, - {"{{feed_link}}": feed.link}, - {"{{feed_subtitle}}": feed.subtitle}, - {"{{feed_title}}": feed.title}, - {"{{feed_updated}}": feed.updated}, - {"{{feed_updates_enabled}}": str(feed.updates_enabled)}, - {"{{feed_url}}": feed.url}, - {"{{feed_user_title}}": feed.user_title}, - {"{{feed_version}}": feed.version}, - {"{{entry_added}}": entry.added}, - {"{{entry_author}}": entry.author}, + feed_added: str = feed.added.strftime("%Y-%m-%d %H:%M:%S") if feed.added else "Never" + feed_last_exception: str = feed.last_exception.value_str if feed.last_exception else "" + feed_last_updated: str = feed.last_updated.strftime("%Y-%m-%d %H:%M:%S") if feed.last_updated else "Never" + feed_updated: str = feed.updated.strftime("%Y-%m-%d %H:%M:%S") if feed.updated else "Never" + entry_added: str = entry.added.strftime("%Y-%m-%d %H:%M:%S") if entry.added else "Never" + entry_published: 
str = entry.published.strftime("%Y-%m-%d %H:%M:%S") if entry.published else "Never" + entry_read_modified: str = entry.read_modified.strftime("%Y-%m-%d %H:%M:%S") if entry.read_modified else "Never" + entry_updated: str = entry.updated.strftime("%Y-%m-%d %H:%M:%S") if entry.updated else "Never" + + list_of_replacements: list[dict[str, str]] = [ + {"{{feed_author}}": feed.author or ""}, + {"{{feed_added}}": feed_added}, + {"{{feed_last_exception}}": feed_last_exception}, + {"{{feed_last_updated}}": feed_last_updated}, + {"{{feed_link}}": feed.link or ""}, + {"{{feed_subtitle}}": feed.subtitle or ""}, + {"{{feed_title}}": feed.title or ""}, + {"{{feed_updated}}": feed_updated}, + {"{{feed_updates_enabled}}": str(feed.updates_enabled) or ""}, + {"{{feed_url}}": feed.url or ""}, + {"{{feed_user_title}}": feed.user_title or ""}, + {"{{feed_version}}": feed.version or ""}, + {"{{entry_added}}": entry_added}, + {"{{entry_author}}": entry.author or ""}, {"{{entry_content}}": content}, {"{{entry_content_raw}}": entry.content[0].value if entry.content else ""}, - {"{{entry_id}}": entry.id}, - {"{{entry_important}}": str(entry.important)}, - {"{{entry_link}}": entry.link}, - {"{{entry_published}}": entry.published}, - {"{{entry_read}}": str(entry.read)}, - {"{{entry_read_modified}}": entry.read_modified}, + {"{{entry_id}}": entry.id or ""}, + {"{{entry_important}}": str(entry.important) or ""}, + {"{{entry_link}}": entry.link or ""}, + {"{{entry_published}}": entry_published}, + {"{{entry_read}}": str(entry.read) or ""}, + {"{{entry_read_modified}}": entry_read_modified}, {"{{entry_summary}}": summary}, {"{{entry_summary_raw}}": entry.summary or ""}, {"{{entry_text}}": summary or content}, - {"{{entry_title}}": entry.title}, - {"{{entry_updated}}": entry.updated}, + {"{{entry_title}}": entry.title or ""}, + {"{{entry_updated}}": entry_updated}, {"{{image_1}}": first_image}, ] for replacement in list_of_replacements: for template, replace_with in replacement.items(): + if not 
isinstance(replace_with, str): + logger.error("replace_with is not a string: %s, it is a %s", replace_with, type(replace_with)) + continue + custom_message = try_to_replace(custom_message, template, replace_with) return custom_message.replace("\\n", "\n") @@ -200,9 +214,18 @@ def replace_tags_in_embed(feed: Feed, entry: Entry) -> CustomEmbed: embed.author_name = embed.title embed.title = "" + feed_added: str = feed.added.strftime("%Y-%m-%d %H:%M:%S") if feed.added else "Never" + feed_last_exception: str = feed.last_exception.value_str if feed.last_exception else "" + feed_last_updated: str = feed.last_updated.strftime("%Y-%m-%d %H:%M:%S") if feed.last_updated else "Never" + feed_updated: str = feed.updated.strftime("%Y-%m-%d %H:%M:%S") if feed.updated else "Never" + entry_published: str = entry.published.strftime("%Y-%m-%d %H:%M:%S") if entry.published else "Never" + entry_read_modified: str = entry.read_modified.strftime("%Y-%m-%d %H:%M:%S") if entry.read_modified else "Never" + entry_updated: str = entry.updated.strftime("%Y-%m-%d %H:%M:%S") if entry.updated else "Never" + list_of_replacements: list[dict[str, str]] = [ {"{{feed_author}}": feed.author or ""}, {"{{feed_added}}": feed_added or ""}, + {"{{feed_last_exception}}": feed_last_exception}, {"{{feed_last_updated}}": feed_last_updated or ""}, {"{{feed_link}}": feed.link or ""}, {"{{feed_subtitle}}": feed.subtitle or ""}, @@ -308,7 +331,9 @@ def get_embed(custom_reader: Reader, feed: Feed) -> CustomEmbed: Returns: Returns the contents from the embed tag. 
""" - if embed := custom_reader.get_tag(feed, "embed", ""): + embed = custom_reader.get_tag(feed, "embed", "") + + if embed: if not isinstance(embed, str): return get_embed_data(embed) # type: ignore embed_data: dict[str, str | int] = json.loads(embed) diff --git a/discord_rss_bot/feeds.py b/discord_rss_bot/feeds.py index 6ac372a..aff8bee 100644 --- a/discord_rss_bot/feeds.py +++ b/discord_rss_bot/feeds.py @@ -7,12 +7,13 @@ from typing import TYPE_CHECKING from discord_webhook import DiscordEmbed, DiscordWebhook from fastapi import HTTPException -from reader import Entry, EntryNotFoundError, Feed, FeedExistsError, Reader, StorageError, TagNotFoundError +from reader import Entry, EntryNotFoundError, Feed, FeedExistsError, Reader, ReaderError, StorageError, TagNotFoundError from discord_rss_bot import custom_message -from discord_rss_bot.filter.blacklist import should_be_skipped +from discord_rss_bot.filter.blacklist import entry_should_be_skipped from discord_rss_bot.filter.whitelist import has_white_tags, should_be_sent from discord_rss_bot.is_url_valid import is_url_valid +from discord_rss_bot.missing_tags import add_missing_tags from discord_rss_bot.settings import default_custom_message, get_reader if TYPE_CHECKING: @@ -50,15 +51,21 @@ def send_entry_to_discord(entry: Entry, custom_reader: Reader | None = None) -> webhook_message = "No message found." # Create the webhook. 
- if bool(reader.get_tag(entry.feed, "should_send_embed")): + try: + should_send_embed = bool(reader.get_tag(entry.feed, "should_send_embed")) + except TagNotFoundError: + logger.exception("No should_send_embed tag found for feed: %s", entry.feed.url) + should_send_embed = True + except StorageError: + logger.exception("Error getting should_send_embed tag for feed: %s", entry.feed.url) + should_send_embed = True + + if should_send_embed: webhook = create_embed_webhook(webhook_url, entry) else: webhook: DiscordWebhook = DiscordWebhook(url=webhook_url, content=webhook_message, rate_limit_retry=True) - response: Response = webhook.execute() - if response.status_code not in {200, 204}: - logger.error("Error sending entry to Discord: %s\n%s", response.text, pprint.pformat(webhook.json)) - return f"Error sending entry to Discord: {response.text}" + execute_webhook(webhook, entry) return None @@ -159,7 +166,43 @@ def create_embed_webhook(webhook_url: str, entry: Entry) -> DiscordWebhook: return webhook -def send_to_discord(custom_reader: Reader | None = None, feed: Feed | None = None, *, do_once: bool = False) -> None: # noqa: PLR0912 +def get_webhook_url(reader: Reader, entry: Entry) -> str: + """Get the webhook URL for the entry. + + Args: + reader: The reader to use. + entry: The entry to get the webhook URL for. + + Returns: + str: The webhook URL. + """ + try: + webhook_url: str = str(reader.get_tag(entry.feed_url, "webhook")) + except TagNotFoundError: + logger.exception("No webhook URL found for feed: %s", entry.feed.url) + return "" + except StorageError: + logger.exception("Storage error getting webhook URL for feed: %s", entry.feed.url) + return "" + return webhook_url + + +def set_entry_as_read(reader: Reader, entry: Entry) -> None: + """Set the webhook to read, so we don't send it again. + + Args: + reader: The reader to use. + entry: The entry to set as read. 
+ """ + try: + reader.set_entry_read(entry, True) + except EntryNotFoundError: + logger.exception("Error setting entry to read: %s", entry.id) + except StorageError: + logger.exception("Error setting entry to read: %s", entry.id) + + +def send_to_discord(custom_reader: Reader | None = None, feed: Feed | None = None, *, do_once: bool = False) -> None: """Send entries to Discord. If response was not ok, we will log the error and mark the entry as unread, so it will be sent again next time. @@ -178,27 +221,19 @@ def send_to_discord(custom_reader: Reader | None = None, feed: Feed | None = Non # Loop through the unread entries. entries: Iterable[Entry] = reader.get_entries(feed=feed, read=False) for entry in entries: + set_entry_as_read(reader, entry) + if entry.added < datetime.datetime.now(tz=entry.added.tzinfo) - datetime.timedelta(days=1): logger.info("Entry is older than 24 hours: %s from %s", entry.id, entry.feed.url) - reader.set_entry_read(entry, True) continue - # Set the webhook to read, so we don't send it again. - try: - reader.set_entry_read(entry, True) - except EntryNotFoundError: - logger.exception("Error setting entry to read: %s", entry.id) - continue - except StorageError: - logger.exception("Error setting entry to read: %s", entry.id) - continue - - # Get the webhook URL for the entry. If it is None, we will continue to the next entry. 
- webhook_url: str = str(reader.get_tag(entry.feed_url, "webhook", "")) + webhook_url: str = get_webhook_url(reader, entry) if not webhook_url: + logger.info("No webhook URL found for feed: %s", entry.feed.url) continue - if bool(reader.get_tag(entry.feed, "should_send_embed")): + should_send_embed: bool = should_send_embed_check(reader, entry) + if should_send_embed: webhook = create_embed_webhook(webhook_url, entry) else: # If the user has set the custom message to an empty string, we will use the default message, otherwise we @@ -208,49 +243,87 @@ def send_to_discord(custom_reader: Reader | None = None, feed: Feed | None = Non else: webhook_message: str = str(default_custom_message) - # Its actually 4096, but we will use 4000 to be safe. - max_content_length: int = 4000 - webhook_message = ( - f"{webhook_message[:max_content_length]}..." - if len(webhook_message) > max_content_length - else webhook_message - ) + webhook_message = truncate_webhook_message(webhook_message) # Create the webhook. webhook: DiscordWebhook = DiscordWebhook(url=webhook_url, content=webhook_message, rate_limit_retry=True) - # Check if the entry is blacklisted, if it is, mark it as read and continue. - if should_be_skipped(reader, entry): + # Check if the entry is blacklisted, and if it is, we will skip it. + if entry_should_be_skipped(reader, entry): logger.info("Entry was blacklisted: %s", entry.id) - reader.set_entry_read(entry, True) continue # Check if the feed has a whitelist, and if it does, check if the entry is whitelisted. 
if has_white_tags(reader, entry.feed): if should_be_sent(reader, entry): - response: Response = webhook.execute() - if response.status_code not in {200, 204}: - logger.error("Error sending entry to Discord: %s\n%s", response.text, pprint.pformat(webhook.json)) - - reader.set_entry_read(entry, True) + execute_webhook(webhook, entry) return - reader.set_entry_read(entry, True) continue - # It was not blacklisted, and not forced through whitelist, so we will send it to Discord. - response: Response = webhook.execute() - if response.status_code not in {200, 204}: - logger.error("Error sending entry to Discord: %s\n%s", response.text, pprint.pformat(webhook.json)) - reader.set_entry_read(entry, True) - return + # Send the entry to Discord as it is not blacklisted or feed has a whitelist. + execute_webhook(webhook, entry) # If we only want to send one entry, we will break the loop. This is used when testing this function. if do_once: - logger.info("Sent one entry to Discord.") + logger.info("Sent one entry to Discord. Breaking the loop.") break - # Update the search index. - reader.update_search() + +def execute_webhook(webhook: DiscordWebhook, entry: Entry) -> None: + """Execute the webhook. + + Args: + webhook (DiscordWebhook): The webhook to execute. + entry (Entry): The entry to send to Discord. + + """ + response: Response = webhook.execute() + if response.status_code not in {200, 204}: + msg: str = f"Error sending entry to Discord: {response.text}\n{pprint.pformat(webhook.json)}" + if entry: + msg += f"\n{entry}" + + logger.error(msg) + else: + logger.info("Sent entry to Discord: %s", entry.id) + + +def should_send_embed_check(reader: Reader, entry: Entry) -> bool: + """Check if we should send an embed to Discord. + + Args: + reader (Reader): The reader to use. + entry (Entry): The entry to check. + + Returns: + bool: True if we should send an embed, False otherwise. 
+ """ + try: + should_send_embed = bool(reader.get_tag(entry.feed, "should_send_embed")) + except TagNotFoundError: + logger.exception("No should_send_embed tag found for feed: %s", entry.feed.url) + should_send_embed = True + except ReaderError: + logger.exception("Error getting should_send_embed tag for feed: %s", entry.feed.url) + should_send_embed = True + + return should_send_embed + + +def truncate_webhook_message(webhook_message: str) -> str: + """Truncate the webhook message if it is too long. + + Args: + webhook_message (str): The webhook message to truncate. + + Returns: + str: The truncated webhook message. + """ + max_content_length: int = 4000 + if len(webhook_message) > max_content_length: + half_length = (max_content_length - 3) // 2 # Subtracting 3 for the "..." in the middle + webhook_message = f"{webhook_message[:half_length]}...{webhook_message[-half_length:]}" + return webhook_message def create_feed(reader: Reader, feed_url: str, webhook_dropdown: str) -> None: @@ -277,7 +350,6 @@ def create_feed(reader: Reader, feed_url: str, webhook_dropdown: str) -> None: raise HTTPException(status_code=404, detail="Webhook not found") try: - # TODO(TheLovinator): Check if the feed is valid reader.add_feed(clean_feed_url) except FeedExistsError: # Add the webhook to an already added feed if it doesn't have a webhook instead of trying to create a new. @@ -285,8 +357,13 @@ def create_feed(reader: Reader, feed_url: str, webhook_dropdown: str) -> None: reader.get_tag(clean_feed_url, "webhook") except TagNotFoundError: reader.set_tag(clean_feed_url, "webhook", webhook_url) # type: ignore + except ReaderError as e: + raise HTTPException(status_code=404, detail=f"Error adding feed: {e}") from e - reader.update_feed(clean_feed_url) + try: + reader.update_feed(clean_feed_url) + except ReaderError as e: + raise HTTPException(status_code=404, detail=f"Error updating feed: {e}") from e # Mark every entry as read, so we don't send all the old entries to Discord. 
entries: Iterable[Entry] = reader.get_entries(feed=clean_feed_url, read=False) @@ -305,3 +382,5 @@ def create_feed(reader: Reader, feed_url: str, webhook_dropdown: str) -> None: # Update the full-text search index so our new feed is searchable. reader.update_search() + + add_missing_tags(reader) diff --git a/discord_rss_bot/filter/blacklist.py b/discord_rss_bot/filter/blacklist.py index 8daf687..aabd21b 100644 --- a/discord_rss_bot/filter/blacklist.py +++ b/discord_rss_bot/filter/blacklist.py @@ -3,7 +3,7 @@ from reader import Entry, Feed, Reader from discord_rss_bot.filter.utils import is_word_in_text -def has_black_tags(custom_reader: Reader, feed: Feed) -> bool: +def feed_has_blacklist_tags(custom_reader: Reader, feed: Feed) -> bool: """Return True if the feed has blacklist tags. The following tags are checked: @@ -25,7 +25,7 @@ def has_black_tags(custom_reader: Reader, feed: Feed) -> bool: return bool(blacklist_title or blacklist_summary or blacklist_content) -def should_be_skipped(custom_reader: Reader, entry: Entry) -> bool: +def entry_should_be_skipped(custom_reader: Reader, entry: Entry) -> bool: """Return True if the entry is in the blacklist. Args: @@ -35,11 +35,10 @@ def should_be_skipped(custom_reader: Reader, entry: Entry) -> bool: Returns: bool: If the entry is in the blacklist. 
""" - feed: Feed = entry.feed - blacklist_title: str = str(custom_reader.get_tag(feed, "blacklist_title", "")) - blacklist_summary: str = str(custom_reader.get_tag(feed, "blacklist_summary", "")) - blacklist_content: str = str(custom_reader.get_tag(feed, "blacklist_content", "")) - blacklist_author: str = str(custom_reader.get_tag(feed, "blacklist_author", "")) + blacklist_title: str = str(custom_reader.get_tag(entry.feed, "blacklist_title", "")) + blacklist_summary: str = str(custom_reader.get_tag(entry.feed, "blacklist_summary", "")) + blacklist_content: str = str(custom_reader.get_tag(entry.feed, "blacklist_content", "")) + blacklist_author: str = str(custom_reader.get_tag(entry.feed, "blacklist_author", "")) # TODO(TheLovinator): Also add support for entry_text and more. if entry.title and blacklist_title and is_word_in_text(blacklist_title, entry.title): diff --git a/discord_rss_bot/filter/utils.py b/discord_rss_bot/filter/utils.py index 8eed08f..090518d 100644 --- a/discord_rss_bot/filter/utils.py +++ b/discord_rss_bot/filter/utils.py @@ -3,23 +3,20 @@ from __future__ import annotations import re -def is_word_in_text(words: str, text: str) -> bool: - """Check if the word is in the text. +def is_word_in_text(word_string: str, text: str) -> bool: + """Check if any of the words are in the text. Args: - words: The words to search for. + word_string: A comma-separated string of words to search for. text: The text to search in. Returns: - bool: If the word is in the text. + bool: True if any word is found in the text, otherwise False. """ - # Split the word list into a list of words. - word_list: list[str] = words.split(",") + word_list: list[str] = word_string.split(",") - # Check if each word is in the text. - for word in word_list: - look_for: str = rf"(^|[^\w]){word}([^\w]|$)" - pattern: re.Pattern[str] = re.compile(look_for, re.IGNORECASE) - if re.search(pattern, text): - return True - return False + # Compile regex patterns for each word. 
+ patterns: list[re.Pattern[str]] = [re.compile(rf"(^|[^\w]){word}([^\w]|$)", re.IGNORECASE) for word in word_list] + + # Check if any pattern matches the text. + return any(pattern.search(text) for pattern in patterns) diff --git a/discord_rss_bot/main.py b/discord_rss_bot/main.py index 0cf932a..c98f4bd 100644 --- a/discord_rss_bot/main.py +++ b/discord_rss_bot/main.py @@ -20,13 +20,12 @@ from fastapi.staticfiles import StaticFiles from fastapi.templating import Jinja2Templates from httpx import Response from markdownify import markdownify -from reader import Entry, Feed, FeedNotFoundError, Reader, TagNotFoundError +from reader import Entry, EntryNotFoundError, Feed, FeedNotFoundError, Reader, TagNotFoundError from reader.types import JSONType from starlette.responses import RedirectResponse from discord_rss_bot import settings from discord_rss_bot.custom_filters import ( - encode_url, entry_is_blacklisted, entry_is_whitelisted, ) @@ -42,7 +41,6 @@ from discord_rss_bot.feeds import create_feed, send_entry_to_discord, send_to_di from discord_rss_bot.missing_tags import add_missing_tags from discord_rss_bot.search import create_html_for_search_results from discord_rss_bot.settings import get_reader -from discord_rss_bot.webhook import add_webhook, remove_webhook if TYPE_CHECKING: from collections.abc import Iterable @@ -90,7 +88,7 @@ reader: Reader = get_reader() @asynccontextmanager async def lifespan(app: FastAPI) -> typing.AsyncGenerator[None]: """This is needed for the ASGI server to run.""" - add_missing_tags(reader=reader) + add_missing_tags(reader) scheduler: AsyncIOScheduler = AsyncIOScheduler() # Update all feeds every 15 minutes. @@ -109,7 +107,7 @@ templates: Jinja2Templates = Jinja2Templates(directory="discord_rss_bot/template # Add the filters to the Jinja2 environment so they can be used in html templates. 
-templates.env.filters["encode_url"] = encode_url +templates.env.filters["encode_url"] = lambda url: urllib.parse.quote(url) if url else "" templates.env.filters["entry_is_whitelisted"] = entry_is_whitelisted templates.env.filters["entry_is_blacklisted"] = entry_is_blacklisted templates.env.filters["discord_markdown"] = markdownify @@ -126,11 +124,32 @@ async def post_add_webhook( webhook_name: The name of the webhook. webhook_url: The url of the webhook. + Raises: + HTTPException: If the webhook already exists. + Returns: RedirectResponse: Redirect to the index page. """ - add_webhook(reader, webhook_name, webhook_url) - return RedirectResponse(url="/", status_code=303) + # Get current webhooks from the database if they exist otherwise use an empty list. + webhooks = list(reader.get_tag((), "webhooks", [])) + + # Webhooks are stored as a list of dictionaries. + # Example: [{"name": "webhook_name", "url": "webhook_url"}] + webhooks = cast(list[dict[str, str]], webhooks) + + # Only add the webhook if it doesn't already exist. + stripped_webhook_name = webhook_name.strip() + if all(webhook["name"] != stripped_webhook_name for webhook in webhooks): + # Add the new webhook to the list of webhooks. + webhooks.append({"name": webhook_name.strip(), "url": webhook_url.strip()}) + + reader.set_tag((), "webhooks", webhooks) # type: ignore + + return RedirectResponse(url="/", status_code=303) + + # TODO(TheLovinator): Show this error on the page. + # TODO(TheLovinator): Replace HTTPException with WebhookAlreadyExistsError. + raise HTTPException(status_code=409, detail="Webhook already exists") @app.post("/delete_webhook") @@ -140,11 +159,37 @@ async def post_delete_webhook(webhook_url: Annotated[str, Form()]) -> RedirectRe Args: webhook_url: The url of the webhook. + Raises: + HTTPException: If the webhook could not be deleted + Returns: RedirectResponse: Redirect to the index page. """ # TODO(TheLovinator): Check if the webhook is in use by any feeds before deleting it. 
- remove_webhook(reader, webhook_url) + # TODO(TheLovinator): Replace HTTPException with a custom exception for both of these. + # Get current webhooks from the database if they exist otherwise use an empty list. + webhooks = list(reader.get_tag((), "webhooks", [])) + + # Webhooks are stored as a list of dictionaries. + # Example: [{"name": "webhook_name", "url": "webhook_url"}] + webhooks = cast(list[dict[str, str]], webhooks) + + # Only add the webhook if it doesn't already exist. + webhooks_to_remove: list[dict[str, str]] = [ + webhook for webhook in webhooks if webhook["url"] == webhook_url.strip() + ] + + # Remove the webhooks outside the loop. + for webhook in webhooks_to_remove: + webhooks.remove(webhook) + + # Check if any webhooks were removed. + if not all(webhook not in webhooks for webhook in webhooks_to_remove): + raise HTTPException(status_code=500, detail="Webhook could not be deleted") + + # Add our new list of webhooks to the database. + reader.set_tag((), "webhooks", webhooks) # type: ignore + return RedirectResponse(url="/", status_code=303) @@ -515,7 +560,7 @@ def get_add(request: Request): @app.get("/feed", response_class=HTMLResponse) -async def get_feed(feed_url: str, request: Request, starting_after: str | None = None): +async def get_feed(feed_url: str, request: Request, starting_after: str = ""): """Get a feed by URL. Args: @@ -523,15 +568,65 @@ async def get_feed(feed_url: str, request: Request, starting_after: str | None = request: The request object. starting_after: The entry to start after. Used for pagination. + Raises: + HTTPException: If the feed is not found. + Returns: HTMLResponse: The feed page. 
""" + entries_per_page: int = 20 + clean_feed_url: str = urllib.parse.unquote(feed_url.strip()) - feed: Feed = reader.get_feed(clean_feed_url) + try: + feed: Feed = reader.get_feed(clean_feed_url) + except FeedNotFoundError as e: + raise HTTPException(status_code=404, detail=f"Feed '{clean_feed_url}' not found.\n\n{e}") from e + + # Only show button if more than 10 entries. + total_entries: int = reader.get_entry_counts(feed=feed).total or 0 + show_more_entires_button: bool = total_entries > entries_per_page # Get entries from the feed. - entries: typing.Iterable[Entry] = reader.get_entries(feed=clean_feed_url, limit=10) + if starting_after: + try: + start_after_entry: Entry | None = reader.get_entry((str(feed.url), starting_after)) + except FeedNotFoundError as e: + raise HTTPException(status_code=404, detail=f"Feed '{clean_feed_url}' not found.\n\n{e}") from e + except EntryNotFoundError as e: + current_entries = list(reader.get_entries(feed=clean_feed_url)) + msg: str = f"{e}\n\n{[entry.id for entry in current_entries]}" + html: str = create_html_for_feed(current_entries) + + context = { + "request": request, + "feed": feed, + "entries": current_entries, + "feed_counts": reader.get_feed_counts(feed=clean_feed_url), + "html": html, + "should_send_embed": False, + "last_entry": None, + "messages": msg, + "show_more_entires_button": show_more_entires_button, + "total_entries": total_entries, + } + return templates.TemplateResponse(request=request, name="feed.html", context=context) + + else: + start_after_entry = None + + entries: typing.Iterable[Entry] = reader.get_entries( + feed=clean_feed_url, + starting_after=start_after_entry, + limit=entries_per_page, + ) + + entries = list(entries) + + # Get the last entry. + last_entry: Entry | None = None + if entries: + last_entry = entries[-1] # Create the html for the entries. 
html: str = create_html_for_feed(entries) @@ -549,47 +644,9 @@ async def get_feed(feed_url: str, request: Request, starting_after: str | None = "feed_counts": reader.get_feed_counts(feed=clean_feed_url), "html": html, "should_send_embed": should_send_embed, - "show_more_button": True, - } - return templates.TemplateResponse(request=request, name="feed.html", context=context) - - -@app.get("/feed_more", response_class=HTMLResponse) -async def get_all_entries(feed_url: str, request: Request): - """Get a feed by URL and show more entries. - - Args: - feed_url: The feed to add. - request: The request object. - starting_after: The entry to start after. Used for pagination. - - Returns: - HTMLResponse: The feed page. - """ - clean_feed_url: str = urllib.parse.unquote(feed_url.strip()) - - feed: Feed = reader.get_feed(clean_feed_url) - - # Get entries from the feed. - entries: typing.Iterable[Entry] = reader.get_entries(feed=clean_feed_url, limit=200) - - # Create the html for the entries. - html: str = create_html_for_feed(entries) - - try: - should_send_embed: bool = bool(reader.get_tag(feed, "should_send_embed")) - except TagNotFoundError: - add_missing_tags(reader) - should_send_embed: bool = bool(reader.get_tag(feed, "should_send_embed")) - - context = { - "request": request, - "feed": feed, - "entries": entries, - "feed_counts": reader.get_feed_counts(feed=clean_feed_url), - "html": html, - "should_send_embed": should_send_embed, - "show_more_button": False, + "last_entry": last_entry, + "show_more_entires_button": show_more_entires_button, + "total_entries": total_entries, } return templates.TemplateResponse(request=request, name="feed.html", context=context) diff --git a/discord_rss_bot/static/styles.css b/discord_rss_bot/static/styles.css index 43a6428..db0cfba 100644 --- a/discord_rss_bot/static/styles.css +++ b/discord_rss_bot/static/styles.css @@ -7,9 +7,9 @@ body { } .text-muted { - color: #888888 !important; + color: #acabab !important; } .form-text { - 
color: #888888; + color: #acabab; } diff --git a/discord_rss_bot/templates/add.html b/discord_rss_bot/templates/add.html index 04c969f..485c090 100644 --- a/discord_rss_bot/templates/add.html +++ b/discord_rss_bot/templates/add.html @@ -1,39 +1,32 @@ {% extends "base.html" %} {% block title %} - | Add new feed +| Add new feed {% endblock title %} {% block content %} -
-
- -
- -
- -
+
+ + +
+ +
+
- -
- -
- -
-
- You can add more webhooks here -
+
+ +
+ +
+
- -
- -
- -
+
+ +
+ +
+ +
{% endblock content %} diff --git a/discord_rss_bot/templates/add_webhook.html b/discord_rss_bot/templates/add_webhook.html index 5aca422..5948395 100644 --- a/discord_rss_bot/templates/add_webhook.html +++ b/discord_rss_bot/templates/add_webhook.html @@ -1,40 +1,34 @@ {% extends "base.html" %} {% block title %} - | Add new webhook +| Add new webhook {% endblock title %} {% block content %} -
-
- {# Webhook name #} -
- -
- -
+
+ + {# Webhook name #} +
+ +
+
- {# Webhook URL #} -
- -
- -
-
- You can append ?thread_id=THREAD_ID to the end - of the URL to send messages to a thread. You can get - the thread ID by right-clicking on the thread and - Copy Thread ID. -
+
+ {# Webhook URL #} +
+ +
+
- {# Submit button #} -
- +
+ You can append ?thread_id=THREAD_ID to the end + of the URL to send messages to a thread. You can get + the thread ID by right-clicking on the thread and + Copy Thread ID.
- -
+
+ {# Submit button #} +
+ +
+ +
{% endblock content %} diff --git a/discord_rss_bot/templates/base.html b/discord_rss_bot/templates/base.html index f046a13..a8640dd 100644 --- a/discord_rss_bot/templates/base.html +++ b/discord_rss_bot/templates/base.html @@ -1,12 +1,13 @@ + + content="Stay updated with the latest news and events with our easy-to-use RSS bot. Never miss a message or announcement again with real-time notifications directly to your Discord server." /> + content="discord, rss, bot, notifications, announcements, updates, real-time, server, messages, news, events, feed." /> @@ -17,11 +18,19 @@ {% block head %} {% endblock head %} + {% include "nav.html" %}
+ {% if messages %} + + {% endif %} + {% block content %} {% endblock content %}
@@ -32,11 +41,11 @@
@@ -45,4 +54,5 @@
+ diff --git a/discord_rss_bot/templates/blacklist.html b/discord_rss_bot/templates/blacklist.html index d4e4838..3632277 100644 --- a/discord_rss_bot/templates/blacklist.html +++ b/discord_rss_bot/templates/blacklist.html @@ -1,67 +1,55 @@ {% extends "base.html" %} {% block title %} - | Blacklist +| Blacklist {% endblock title %} {% block content %} -
-
- -
-
-
-
    -
  • - Comma separated list of words to blacklist. If a word is found in the - corresponding blacklists, the feed will not be sent. -
  • -
  • Whitelist always takes precedence over blacklist. Leave empty to disable.
  • -
  • Words are case-insensitive. No spaces should be used before or after the comma.
  • -
  • - Correct: - +
    + + +
    +
    +
    +
      +
    • + Comma separated list of words to blacklist. If a word is found in the + corresponding blacklists, the feed will not be sent. +
    • +
    • Whitelist always takes precedence over blacklist. Leave empty to disable.
    • +
    • Words are case-insensitive. No spaces should be used before or after the comma.
    • +
    • + Correct: + primogem,events,gameplay preview,special program -
    • -
    • - Wrong: - +
    • +
    • + Wrong: + primogem, events, gameplay preview, special program -
    • -
    -
    - - - - - - - - +
  • +
+ + + + + + + +
- - - -
- -
- -
+
+ + + +
+ +
+ +
{% endblock content %} diff --git a/discord_rss_bot/templates/custom.html b/discord_rss_bot/templates/custom.html index db56359..f018d3a 100644 --- a/discord_rss_bot/templates/custom.html +++ b/discord_rss_bot/templates/custom.html @@ -1,259 +1,252 @@ {% extends "base.html" %} {% block title %} - | Custom message +| Custom message {% endblock title %} {% block content %} -
-
- -
-
-
-
    -
  • You can modify the message that is sent to Discord.
  • -
  • You can use \n to create a new line.
  • -
  • - You can remove the embed from links by adding < and > around the link. (For example < - {% raw %} - {{ entry_link }} - {% endraw %} - >) -
  • -
    -
  • - +
    + + +
    +
    +
    +
      +
    • You can modify the message that is sent to Discord.
    • +
    • You can use \n to create a new line.
    • +
    • + You can remove the embed from links by adding < and> around the link. (For example < + {% raw %} {{ entry_link }} {% endraw %}>) +
    • +
      +
    • + {% raw %} {{ feed_author }} {% endraw %} {{ feed.author }} -
    • -
    • - +
    • +
    • + {% raw %} {{ feed_added }} {% endraw %} {{ feed.added }} -
    • -
    • - +
    • +
    • + {% raw %} {{ feed_last_exception }} {% endraw %} {{ feed.last_exception }} -
    • -
    • - +
    • +
    • + {% raw %} {{ feed_last_updated }} {% endraw %} {{ feed.last_updated }} -
    • -
    • - +
    • +
    • + {% raw %} {{ feed_link }} {% endraw %} {{ feed.link }} -
    • -
    • - +
    • +
    • + {% raw %} {{ feed_subtitle }} {% endraw %} {{ feed.subtitle }} -
    • -
    • - +
    • +
    • + {% raw %} {{ feed_title }} {% endraw %} {{ feed.title }} -
    • -
    • - +
    • +
    • + {% raw %} {{ feed_updated }} {% endraw %} {{ feed.updated }} -
    • -
    • - +
    • +
    • + {% raw %} {{ feed_updates_enabled }} {% endraw %} {{ feed.updates_enabled }} -
    • -
    • - +
    • +
    • + {% raw %} {{ feed_url }} {% endraw %} {{ feed.url }} -
    • -
    • - +
    • +
    • + {% raw %} {{ feed_user_title }} {% endraw %} {{ feed.user_title }} -
    • -
    • - +
    • +
    • + {% raw %} {{ feed_version }} {% endraw %} {{ feed.version }} -
    • -
      - {% if entry %} -
    • - +
    • +
      + {% if entry %} +
    • + {% raw %} {{ entry_added }} {% endraw %} {{ entry.added }} -
    • -
    • - +
    • +
    • + {% raw %} {{ entry_author }} {% endraw %} {{ entry.author }} -
    • - {% if entry.content %} -
    • - +
    • + {% if entry.content %} +
    • + {% raw %} {{ entry_content }} {% endraw %} {{ entry.content[0].value|discord_markdown }} -
    • -
    • - +
    • +
    • + {% raw %} {{ entry_content_raw }} {% endraw %} {{ entry.content[0].value }} -
    • - {% endif %} -
    • - +
    • + {% endif %} +
    • + {% raw %} {{ entry_id }} {% endraw %} {{ entry.id }} -
    • -
    • - +
    • +
    • + {% raw %} {{ entry_important }} {% endraw %} {{ entry.important }} -
    • -
    • - +
    • +
    • + {% raw %} {{ entry_link }} {% endraw %} {{ entry.link }} -
    • -
    • - +
    • +
    • + {% raw %} {{ entry_published }} {% endraw %} {{ entry.published }} -
    • -
    • - +
    • +
    • + {% raw %} {{ entry_read }} {% endraw %} {{ entry.read }} -
    • -
    • - +
    • +
    • + {% raw %} {{ entry_read_modified }} {% endraw %} {{ entry.read_modified }} -
    • - {% if entry.summary %} -
    • - +
    • + {% if entry.summary %} +
    • + {% raw %} {{ entry_summary }} {% endraw %} {{ entry.summary|discord_markdown }} -
    • -
    • - +
    • +
    • + {% raw %} {{ entry_summary_raw }} {% endraw %} {{ entry.summary }} -
    • - {% endif %} -
    • - +
    • + {% endif %} +
    • + {% raw %} {{ entry_title }} {% endraw %} {{ entry.title }} -
    • -
    • - +
    • +
    • + {% raw %} {{ entry_text }} {% endraw %} Same as entry_content if it exists, otherwise entry_summary -
    • -
    • - +
    • +
    • + {% raw %} {{ entry_updated }} {% endraw %} {{ entry.updated }} -
    • -
      -
    • - +
    • +
      +
    • + {% raw %} {{ image_1 }} {% endraw %} First image in the entry if it exists -
    • -
    -
      -
    • Examples:
    • -
    • - +
    • +
    +
      +
    • Examples:
    • +
    • + {% raw %} {{ feed_title }}\n{{ entry_content }} {% endraw %} -
    • -
    - {% else %} - Something went wrong, there was no entry found. If this feed has entries and you still see this message, please contact the developer. - {% endif %} -
    - - +
  • +
+ {% else %} + Something went wrong, there was no entry found. If this feed has entries and you still see this + message, please contact the developer. + {% endif %}
+ +
- - - -
- -
- -
+
+ + + +
+ +
+ +
{% endblock content %} diff --git a/discord_rss_bot/templates/embed.html b/discord_rss_bot/templates/embed.html index e3bd516..ff42e16 100644 --- a/discord_rss_bot/templates/embed.html +++ b/discord_rss_bot/templates/embed.html @@ -1,15 +1,15 @@ {% extends "base.html" %} {% block title %} - | Embed +| Embed {% endblock title %} {% block content %} -
-
-
-
-
-
    -
    +
    + +
    +
    +
    +
      +
    • {% raw %} @@ -94,218 +94,170 @@ {% endraw %} {{feed.version}}
    • -
      +
      {% if entry %} -
    • - +
    • + {% raw %} {{entry_added}} {% endraw %} {{entry.added}} -
    • -
    • - +
    • +
    • + {% raw %} {{entry_author}} {% endraw %} {{entry.author}} -
    • - {% if entry.content %} -
    • - +
    • + {% if entry.content %} +
    • + {% raw %} {{entry_content}} {% endraw %} {{entry.content[0].value|discord_markdown}} -
    • -
    • - +
    • +
    • + {% raw %} {{entry_content_raw}} {% endraw %} {{entry.content[0].value}} -
    • - {% endif %} -
    • - +
    • + {% endif %} +
    • + {% raw %} {{entry_id}} {% endraw %} {{entry.id}} -
    • -
    • - +
    • +
    • + {% raw %} {{entry_important}} {% endraw %} {{entry.important}} -
    • -
    • - +
    • +
    • + {% raw %} {{entry_link}} {% endraw %} {{entry.link}} -
    • -
    • - +
    • +
    • + {% raw %} {{entry_published}} {% endraw %} {{entry.published}} -
    • -
    • - +
    • +
    • + {% raw %} {{entry_read}} {% endraw %} {{entry.read}} -
    • -
    • - +
    • +
    • + {% raw %} {{entry_read_modified}} {% endraw %} {{entry.read_modified}} -
    • - {% if entry.summary %} -
    • - +
    • + {% if entry.summary %} +
    • + {% raw %} {{entry_summary}} {% endraw %} {{entry.summary|discord_markdown}} -
    • -
    • - +
    • +
    • + {% raw %} {{entry_summary_raw}} {% endraw %} {{entry.summary}} -
    • - {% endif %} -
    • - +
    • + {% endif %} +
    • + {% raw %} {{entry_title}} {% endraw %} {{entry.title}} -
    • -
    • - +
    • +
    • + {% raw %} {{entry_text}} {% endraw %} Same as entry_content if it exists, otherwise entry_summary -
    • -
    • - +
    • +
    • + {% raw %} {{entry_updated}} {% endraw %} {{entry.updated}} -
    • -
      -
    • - +
    • +
      +
    • + {% raw %} {{image_1}} {% endraw %} First image in the entry if it exists -
    • -
    + +
{% else %} - Something went wrong, there was no entry found. If this feed has entries and you still see this message, please contact the developer. + Something went wrong, there was no entry found. If this feed has entries and you still see this + message, please contact the developer. {% endif %}
- - - - - - - - - - - - - - - - - - - - -
+ + + + + + + + + + + + + + + + + + + +
- - - -
- -
- -
+
+ + + +
+ +
+ +
{% endblock content %} diff --git a/discord_rss_bot/templates/feed.html b/discord_rss_bot/templates/feed.html index e10c969..5dd85c0 100644 --- a/discord_rss_bot/templates/feed.html +++ b/discord_rss_bot/templates/feed.html @@ -1,68 +1,84 @@ {% extends "base.html" %} {% block title %} - | {{ feed.title }} +| {{ feed.title }} {% endblock title %} {% block content %} -
- -

- {{ feed.title }} -

- {% if not feed.updates_enabled %}Disabled{% endif %} - {% if feed.last_exception %} -

{{ feed.last_exception.type_name }}:

- {{ feed.last_exception.value_str }} -
{{ feed.last_exception.traceback_str }}
- {% endif %} -
- -
- {% if not feed.updates_enabled %} -
- -
- {% else %} -
- -
- {% endif %} - {% if should_send_embed == True %} -
- -
- {% else %} -
- -
- {% endif %} - Whitelist -
- Blacklist -
- Customize message - {% if not should_send_embed %}(Active){% endif %} - -
- Customize embed - {% if should_send_embed %}(Active){% endif %} - -
-
- {# HTML is created in main.create_html_for_feed #} -
-    {{ html|safe }}
-    
- {% if show_more_button %} - Show more (Note: This view is not optimized at all, so be ready to wait a while) +
+ +

+ {{ feed.title }} ({{ total_entries }} entries) +

+ {% if not feed.updates_enabled %} + Disabled {% endif %} + + {% if feed.last_exception %} +
+
{{ feed.last_exception.type_name }}:
+ {{ feed.last_exception.value_str }} + +
+
{{ feed.last_exception.traceback_str }}
+
+
+ {% endif %} + + +
+
+ +
+ + {% if not feed.updates_enabled %} +
+ +
+ {% else %} +
+ +
+ {% endif %} + + {% if should_send_embed %} +
+ +
+ {% else %} +
+ +
+ {% endif %} +
+ + + +
+ +{# Rendered HTML content #} +
{{ html|safe }}
+ +{% if show_more_entires_button %} + + Show more entries + +{% endif %} + {% endblock content %} diff --git a/discord_rss_bot/templates/index.html b/discord_rss_bot/templates/index.html index 54fc3dd..78f0729 100644 --- a/discord_rss_bot/templates/index.html +++ b/discord_rss_bot/templates/index.html @@ -1,88 +1,92 @@ {% extends "base.html" %} {% block content %} - -
    - - {% if feeds %} -

    - {{ feed_count.total }} feed{{'s' if feed_count.total > 1 else "" }} - - - {% if feed_count.broken %} - - {{ feed_count.broken }} broken - {% else %} - - {{ feed_count.broken }} broken - {% endif %} - - - {% if feed_count.total != feed_count.updates_enabled %} - - {{ feed_count.updates_enabled }} enabled - {% else %} - - {{ feed_count.updates_enabled }} enabled - {% endif %} - - - {{ entry_count.total }} entries - - ({{ entry_count.averages[0]|round(1) }}, - {{ entry_count.averages[1]|round(1) }}, - {{ entry_count.averages[2]|round(1) }}) - -

    - - {% for hook_from_context in webhooks %} -
    -
      -

      - {{ hook_from_context.name }} -

      - {% for feed_webhook in feeds %} - {% set feed = feed_webhook["feed"] %} - {% set hook_from_feed = feed_webhook["webhook"] %} - {% if hook_from_context.url == hook_from_feed %} -
      - {{ feed.url }} - {% if not feed.updates_enabled %}Disabled{% endif %} - {% if feed.last_exception %}({{ feed.last_exception.value_str }}){% endif %} -
      - {% endif %} - {% endfor %} -
    -
    - {% endfor %} + +
      + + {% if feeds %} +

      + {{ feed_count.total }} feed{{'s' if feed_count.total > 1 else "" }} + + + {% if feed_count.broken %} + - {{ feed_count.broken }} broken {% else %} -

      - Hello there! -
      - You need to add a webhook to get started and then add a feed. You can find both options in the menu above. -
      -
      - If you have any questions or suggestions, feel free to contact me on tlovinator@gmail.com or TheLovinator#9276 on Discord. -
      -Thanks! -

      -{% endif %} - -{% if broken_feeds %} + - {{ feed_count.broken }} broken + {% endif %} + + + {% if feed_count.total != feed_count.updates_enabled %} + - {{ feed_count.updates_enabled }} enabled + {% else %} + - {{ feed_count.updates_enabled }} enabled + {% endif %} + + - {{ entry_count.total }} entries + + ({{ entry_count.averages[0]|round(1) }}, + {{ entry_count.averages[1]|round(1) }}, + {{ entry_count.averages[2]|round(1) }}) + +

      + + {% for hook_from_context in webhooks %} +
      +

      + {{ hook_from_context.name }} +

      +
        + {% for feed_webhook in feeds %} + {% set feed = feed_webhook["feed"] %} + {% set hook_from_feed = feed_webhook["webhook"] %} + {% if hook_from_context.url == hook_from_feed %} +
        + {{ feed.url }} + {% if not feed.updates_enabled %}Disabled{% endif %} + {% if feed.last_exception %}({{ feed.last_exception.value_str }}){% endif %} +
        + {% endif %} + {% endfor %} +
      +
      + {% endfor %} + {% else %} +

      + Hello there! +
      + You need to add a webhook here to get started. After that, you can + add feeds here. You can find both of these links in the navigation bar + above. +
      +
      + If you have any questions or suggestions, feel free to contact me on tlovinator@gmail.com or TheLovinator#9276 on Discord. +
      +
      + Thanks! +

      + {% endif %} + + {% if broken_feeds %}
      -{% endif %} - -{% if feeds_without_attached_webhook %} + {% endif %} + + {% if feeds_without_attached_webhook %}
      -{% endif %} + {% endif %}
    {% endblock content %} diff --git a/discord_rss_bot/templates/nav.html b/discord_rss_bot/templates/nav.html index e14b57e..8b9ee37 100644 --- a/discord_rss_bot/templates/nav.html +++ b/discord_rss_bot/templates/nav.html @@ -1,9 +1,6 @@
{# Search #}
- +
{# Donate button #} +
+
+ +
+ + + +
+
+ +
+
+
+
+
+ + +
+
+
+ {% endfor %} +
+ You can append ?thread_id=THREAD_ID to the URL to send messages to a thread. +
+
+ +
{% endblock content %} diff --git a/discord_rss_bot/templates/whitelist.html b/discord_rss_bot/templates/whitelist.html index 44e3a76..5a958f6 100644 --- a/discord_rss_bot/templates/whitelist.html +++ b/discord_rss_bot/templates/whitelist.html @@ -1,67 +1,55 @@ {% extends "base.html" %} {% block title %} - | Blacklist +| Blacklist {% endblock title %} {% block content %} -
-
- -
-
-
-
    -
  • - Comma separated list of words to whitelist. Only send message to - Discord if one of these words are present in the corresponding fields. -
  • -
  • Whitelist always takes precedence over blacklist. Leave empty to disable.
  • -
  • Words are case-insensitive. No spaces should be used before or after the comma.
  • -
  • - Correct: - +
    + + +
    +
    +
    +
      +
    • + Comma separated list of words to whitelist. Only send message to + Discord if one of these words are present in the corresponding fields. +
    • +
    • Whitelist always takes precedence over blacklist. Leave empty to disable.
    • +
    • Words are case-insensitive. No spaces should be used before or after the comma.
    • +
    • + Correct: + primogem,events,gameplay preview,special program -
    • -
    • - Wrong: - +
    • +
    • + Wrong: + primogem, events, gameplay preview, special program -
    • -
    -
    - - - - - - - - +
  • +
+ + + + + + + +
- - - -
- -
- -
+
+ + + +
+ +
+ + {% endblock content %} diff --git a/discord_rss_bot/webhook.py b/discord_rss_bot/webhook.py deleted file mode 100644 index 82ae667..0000000 --- a/discord_rss_bot/webhook.py +++ /dev/null @@ -1,74 +0,0 @@ -from typing import cast - -from fastapi import HTTPException -from reader import Reader - -from discord_rss_bot.missing_tags import add_missing_tags - - -def add_webhook(reader: Reader, webhook_name: str, webhook_url: str) -> None: - """Add new webhook. - - Args: - reader: The Reader to use - webhook_name: The name of the webhook, this will be shown on the webpage - webhook_url: The webhook URL to send entries to - - Raises: - HTTPException: This is raised when the webhook already exists - """ - # Get current webhooks from the database if they exist otherwise use an empty list. - webhooks = list(reader.get_tag((), "webhooks", [])) - - # Webhooks are stored as a list of dictionaries. - # Example: [{"name": "webhook_name", "url": "webhook_url"}] - webhooks = cast(list[dict[str, str]], webhooks) - - # Only add the webhook if it doesn't already exist. - if all(webhook["name"] != webhook_name.strip() for webhook in webhooks): - # Add the new webhook to the list of webhooks. - webhooks.append({"name": webhook_name.strip(), "url": webhook_url.strip()}) - - # Add our new list of webhooks to the database. - reader.set_tag((), "webhooks", webhooks) # type: ignore - - add_missing_tags(reader) - return - - # TODO(TheLovinator): Show this error on the page. - # TODO(TheLovinator): Replace HTTPException with a custom exception. - raise HTTPException(status_code=409, detail="Webhook already exists") - - -def remove_webhook(reader: Reader, webhook_url: str) -> None: - """Remove webhook. - - Args: - reader (Reader): The Reader to use - webhook_url (str): The webhook URL to remove - - Raises: - HTTPException: If webhook could not be deleted - HTTPException: Webhook not found - """ - # TODO(TheLovinator): Replace HTTPException with a custom exception for both of these. 
- # Get current webhooks from the database if they exist otherwise use an empty list. - webhooks = list(reader.get_tag((), "webhooks", [])) - - # Webhooks are stored as a list of dictionaries. - # Example: [{"name": "webhook_name", "url": "webhook_url"}] - webhooks = cast(list[dict[str, str]], webhooks) - - # Only add the webhook if it doesn't already exist. - webhooks_to_remove = [webhook for webhook in webhooks if webhook["url"] in webhook_url.strip()] - - # Remove the webhooks outside of the loop. - for webhook in webhooks_to_remove: - webhooks.remove(webhook) - - # Check if any webhooks were removed. - if any(webhook in webhooks for webhook in webhooks_to_remove): - raise HTTPException(status_code=500, detail="Webhook could not be deleted") - - # Add our new list of webhooks to the database. - reader.set_tag((), "webhooks", webhooks) # type: ignore diff --git a/poetry.lock b/poetry.lock index 227f05c..7d6ebf0 100644 --- a/poetry.lock +++ b/poetry.lock @@ -286,13 +286,13 @@ files = [ [[package]] name = "djlint" -version = "1.35.2" +version = "1.35.3" description = "HTML Template Linter and Formatter" optional = false -python-versions = "<4.0,>=3.8" +python-versions = ">=3.9" files = [ - {file = "djlint-1.35.2-py3-none-any.whl", hash = "sha256:4ba995bad378f2afa77c8ea56ba1c14429d9ff26a18e8ae23bc71eedb9152243"}, - {file = "djlint-1.35.2.tar.gz", hash = "sha256:318de9d4b9b0061a111f8f5164ecbacd8215f449dd4bd5a76d2a691c815ee103"}, + {file = "djlint-1.35.3-py3-none-any.whl", hash = "sha256:bf2f23798909f9c5a110925c369538383de0141f9a2be37ee0d26422d41b7543"}, + {file = "djlint-1.35.3.tar.gz", hash = "sha256:780ea3e25662fca89033fa96ecf656099954d6f81dce039eac90f4bba3cbe850"}, ] [package.dependencies] @@ -300,11 +300,11 @@ click = ">=8.0.1" colorama = ">=0.4.4" cssbeautifier = ">=1.14.4" html-tag-names = ">=0.1.2" -html-void-elements = ">=0.1.0" +html-void-elements = ">=0.1" jsbeautifier = ">=1.14.4" json5 = ">=0.9.11" -pathspec = ">=0.12.0" -PyYAML = ">=6.0" +pathspec = 
">=0.12" +pyyaml = ">=6" regex = ">=2023" tqdm = ">=4.62.2" @@ -424,54 +424,54 @@ trio = ["trio (>=0.22.0,<1.0)"] [[package]] name = "httptools" -version = "0.6.2" +version = "0.6.4" description = "A collection of framework independent HTTP protocol utils." optional = false python-versions = ">=3.8.0" files = [ - {file = "httptools-0.6.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0238f07780782c018e9801d8f5f5aea3a4680a1af132034b444f677718c6fe88"}, - {file = "httptools-0.6.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:10d28e5597d4349390c640232c9366ddc15568114f56724fe30a53de9686b6ab"}, - {file = "httptools-0.6.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9ddaf99e362ae4169f6a8b3508f3487264e0a1b1e58c0b07b86407bc9ecee831"}, - {file = "httptools-0.6.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:efc9d039b6b8a36b182bc60774bb5d456b8ff9ec44cf97719f2f38bb1dcdd546"}, - {file = "httptools-0.6.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:b57cb8a4a8a8ffdaf0395326ef3b9c1aba36e58a421438fc04c002a1f511db63"}, - {file = "httptools-0.6.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b73cda1326738eab5d60640ca0b87ac4e4db09a099423c41b59a5681917e8d1d"}, - {file = "httptools-0.6.2-cp310-cp310-win_amd64.whl", hash = "sha256:352a496244360deb1c1d108391d76cd6f3dd9f53ccf975a082e74c6761af30c9"}, - {file = "httptools-0.6.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2e9d225b178a6cc700c23cf2f5daf85a10f93f1db7c34e9ee4ee0bbc29ad458a"}, - {file = "httptools-0.6.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d49b14fcc9b12a52da8667587efa124a18e1a3eb63bbbcabf9882f4008d171d6"}, - {file = "httptools-0.6.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d5c33d98b2311ddbe06e92b12b14de334dcfbe64ebcbb2c7a34b5c6036db512"}, - {file = 
"httptools-0.6.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53cd2d776700bf0ed0e6fb203d716b041712ea4906479031cc5ac5421ecaa7d2"}, - {file = "httptools-0.6.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7da016a0dab1fcced89dfff8537033c5dc200015e14023368f3f4a69e39b8716"}, - {file = "httptools-0.6.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4d6e0ba155a1b3159551ac6b4551eb20028617e2e4bb71f2c61efed0756e6825"}, - {file = "httptools-0.6.2-cp311-cp311-win_amd64.whl", hash = "sha256:ad44569b0f508e046ffe85b4a547d5b68d1548fd90767df69449cc28021ee709"}, - {file = "httptools-0.6.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:c92d2b7c1a914ab2f66454961eeaf904f4fe7529b93ff537619d22c18b82d070"}, - {file = "httptools-0.6.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:78f920a75c1dbcb5a48a495f384d73ceb41e437a966c318eb7e56f1c1ad1df3e"}, - {file = "httptools-0.6.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:56bcd9ba0adf16edb4e3e45b8b9346f5b3b2372402e953d54c84b345d0f691e0"}, - {file = "httptools-0.6.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e350a887adb38ac65c93c2f395b60cf482baca61fd396ed8d6fd313dbcce6fac"}, - {file = "httptools-0.6.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ddc328c2a2daf2cf4bdc7bbc8a458dc4c840637223d4b8e01bce2168cc79fd23"}, - {file = "httptools-0.6.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ddaf38943dbb32333a182c894b6092a68b56c5e36d0c54ba3761d28119b15447"}, - {file = "httptools-0.6.2-cp312-cp312-win_amd64.whl", hash = "sha256:052f7f50e4a38f069478143878371ed17937f268349bcd68f6f7a9de9fcfce21"}, - {file = "httptools-0.6.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:406f7dc5d9db68cd9ac638d14c74d077085f76b45f704d3ec38d43b842b3cb44"}, - {file = "httptools-0.6.2-cp313-cp313-macosx_11_0_arm64.whl", hash = 
"sha256:77e22c33123ce11231ff2773d8905e20b45d77a69459def7481283b72a583955"}, - {file = "httptools-0.6.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41965586b02715c3d83dd9153001f654e5b621de0c5255f5ef0635485212d0c0"}, - {file = "httptools-0.6.2-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93b1839d54b80a06a51a31b90d024a1770e250d00de57e7ae069bafba932f398"}, - {file = "httptools-0.6.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:8fdb4634040d1dbde7e0b373e19668cdb61c0ee8690d3b4064ac748d85365bca"}, - {file = "httptools-0.6.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:c30902f9b9da0d74668b6f71d7b57081a4879d9a5ea93d5922dbe15b15b3b24a"}, - {file = "httptools-0.6.2-cp313-cp313-win_amd64.whl", hash = "sha256:cf61238811a75335751b4b17f8b221a35f93f2d57489296742adf98412d2a568"}, - {file = "httptools-0.6.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:8d80878cb40ebf88a48839ff7206ceb62e4b54327e0c2f9f15ee12edbd8b907e"}, - {file = "httptools-0.6.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5141ccc9dbd8cdc59d1e93e318d405477a940dc6ebadcb8d9f8da17d2812d353"}, - {file = "httptools-0.6.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1bb67d47f045f56e9a5da4deccf710bdde21212e4b1f4776b7a542449f6a7682"}, - {file = "httptools-0.6.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76dcb8f5c866f1537ccbaad01ebb3611890d281ef8d25e050d1cc3d90fba6b3d"}, - {file = "httptools-0.6.2-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:1b7bc59362143dc2d02896dde94004ef54ff1989ceedf4b389ad3b530f312364"}, - {file = "httptools-0.6.2-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:c7a5715b1f46e9852442f496c0df2f8c393cc8f293f5396d2c8d95cac852fb51"}, - {file = "httptools-0.6.2-cp38-cp38-win_amd64.whl", hash = "sha256:3f0246ca7f78fa8e3902ddb985b9f55509d417a862f4634a8fa63a7a496266c8"}, 
- {file = "httptools-0.6.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:1099f73952e18c718ccaaf7a97ae58c94a91839c3d247c6184326f85a2eda7b4"}, - {file = "httptools-0.6.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c3e45d004531330030f7d07abe4865bc17963b9989bc1941cebbf7224010fb82"}, - {file = "httptools-0.6.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4f2fea370361a90cb9330610a95303587eda9d1e69930dbbee9978eac1d5946"}, - {file = "httptools-0.6.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0481154c91725f7e7b729a535190388be6c7cbae3bbf0e793343ca386282312"}, - {file = "httptools-0.6.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:d25f8fdbc6cc6561353c7a384d76295e6a85a4945115b8bc347855db150e8c77"}, - {file = "httptools-0.6.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:054bdee08e4f7c15c186f6e7dbc8f0cf974b8dd1832b5f17f988faf8b12815c9"}, - {file = "httptools-0.6.2-cp39-cp39-win_amd64.whl", hash = "sha256:4502620722b453c2c6306fad392c515dcb804dfa9c6d3b90d8926a07a7a01109"}, - {file = "httptools-0.6.2.tar.gz", hash = "sha256:ae694efefcb61317c79b2fa1caebc122060992408e389bb00889567e463a47f1"}, + {file = "httptools-0.6.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3c73ce323711a6ffb0d247dcd5a550b8babf0f757e86a52558fe5b86d6fefcc0"}, + {file = "httptools-0.6.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:345c288418f0944a6fe67be8e6afa9262b18c7626c3ef3c28adc5eabc06a68da"}, + {file = "httptools-0.6.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:deee0e3343f98ee8047e9f4c5bc7cedbf69f5734454a94c38ee829fb2d5fa3c1"}, + {file = "httptools-0.6.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ca80b7485c76f768a3bc83ea58373f8db7b015551117375e4918e2aa77ea9b50"}, + {file = "httptools-0.6.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = 
"sha256:90d96a385fa941283ebd231464045187a31ad932ebfa541be8edf5b3c2328959"}, + {file = "httptools-0.6.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:59e724f8b332319e2875efd360e61ac07f33b492889284a3e05e6d13746876f4"}, + {file = "httptools-0.6.4-cp310-cp310-win_amd64.whl", hash = "sha256:c26f313951f6e26147833fc923f78f95604bbec812a43e5ee37f26dc9e5a686c"}, + {file = "httptools-0.6.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f47f8ed67cc0ff862b84a1189831d1d33c963fb3ce1ee0c65d3b0cbe7b711069"}, + {file = "httptools-0.6.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0614154d5454c21b6410fdf5262b4a3ddb0f53f1e1721cfd59d55f32138c578a"}, + {file = "httptools-0.6.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8787367fbdfccae38e35abf7641dafc5310310a5987b689f4c32cc8cc3ee975"}, + {file = "httptools-0.6.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40b0f7fe4fd38e6a507bdb751db0379df1e99120c65fbdc8ee6c1d044897a636"}, + {file = "httptools-0.6.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:40a5ec98d3f49904b9fe36827dcf1aadfef3b89e2bd05b0e35e94f97c2b14721"}, + {file = "httptools-0.6.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:dacdd3d10ea1b4ca9df97a0a303cbacafc04b5cd375fa98732678151643d4988"}, + {file = "httptools-0.6.4-cp311-cp311-win_amd64.whl", hash = "sha256:288cd628406cc53f9a541cfaf06041b4c71d751856bab45e3702191f931ccd17"}, + {file = "httptools-0.6.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:df017d6c780287d5c80601dafa31f17bddb170232d85c066604d8558683711a2"}, + {file = "httptools-0.6.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:85071a1e8c2d051b507161f6c3e26155b5c790e4e28d7f236422dbacc2a9cc44"}, + {file = "httptools-0.6.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69422b7f458c5af875922cdb5bd586cc1f1033295aa9ff63ee196a87519ac8e1"}, + {file = 
"httptools-0.6.4-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:16e603a3bff50db08cd578d54f07032ca1631450ceb972c2f834c2b860c28ea2"}, + {file = "httptools-0.6.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ec4f178901fa1834d4a060320d2f3abc5c9e39766953d038f1458cb885f47e81"}, + {file = "httptools-0.6.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f9eb89ecf8b290f2e293325c646a211ff1c2493222798bb80a530c5e7502494f"}, + {file = "httptools-0.6.4-cp312-cp312-win_amd64.whl", hash = "sha256:db78cb9ca56b59b016e64b6031eda5653be0589dba2b1b43453f6e8b405a0970"}, + {file = "httptools-0.6.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ade273d7e767d5fae13fa637f4d53b6e961fb7fd93c7797562663f0171c26660"}, + {file = "httptools-0.6.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:856f4bc0478ae143bad54a4242fccb1f3f86a6e1be5548fecfd4102061b3a083"}, + {file = "httptools-0.6.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:322d20ea9cdd1fa98bd6a74b77e2ec5b818abdc3d36695ab402a0de8ef2865a3"}, + {file = "httptools-0.6.4-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4d87b29bd4486c0093fc64dea80231f7c7f7eb4dc70ae394d70a495ab8436071"}, + {file = "httptools-0.6.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:342dd6946aa6bda4b8f18c734576106b8a31f2fe31492881a9a160ec84ff4bd5"}, + {file = "httptools-0.6.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4b36913ba52008249223042dca46e69967985fb4051951f94357ea681e1f5dc0"}, + {file = "httptools-0.6.4-cp313-cp313-win_amd64.whl", hash = "sha256:28908df1b9bb8187393d5b5db91435ccc9c8e891657f9cbb42a2541b44c82fc8"}, + {file = "httptools-0.6.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:d3f0d369e7ffbe59c4b6116a44d6a8eb4783aae027f2c0b366cf0aa964185dba"}, + {file = "httptools-0.6.4-cp38-cp38-macosx_11_0_arm64.whl", hash = 
"sha256:94978a49b8f4569ad607cd4946b759d90b285e39c0d4640c6b36ca7a3ddf2efc"}, + {file = "httptools-0.6.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40dc6a8e399e15ea525305a2ddba998b0af5caa2566bcd79dcbe8948181eeaff"}, + {file = "httptools-0.6.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ab9ba8dcf59de5181f6be44a77458e45a578fc99c31510b8c65b7d5acc3cf490"}, + {file = "httptools-0.6.4-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:fc411e1c0a7dcd2f902c7c48cf079947a7e65b5485dea9decb82b9105ca71a43"}, + {file = "httptools-0.6.4-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:d54efd20338ac52ba31e7da78e4a72570cf729fac82bc31ff9199bedf1dc7440"}, + {file = "httptools-0.6.4-cp38-cp38-win_amd64.whl", hash = "sha256:df959752a0c2748a65ab5387d08287abf6779ae9165916fe053e68ae1fbdc47f"}, + {file = "httptools-0.6.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:85797e37e8eeaa5439d33e556662cc370e474445d5fab24dcadc65a8ffb04003"}, + {file = "httptools-0.6.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:db353d22843cf1028f43c3651581e4bb49374d85692a85f95f7b9a130e1b2cab"}, + {file = "httptools-0.6.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1ffd262a73d7c28424252381a5b854c19d9de5f56f075445d33919a637e3547"}, + {file = "httptools-0.6.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:703c346571fa50d2e9856a37d7cd9435a25e7fd15e236c397bf224afaa355fe9"}, + {file = "httptools-0.6.4-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:aafe0f1918ed07b67c1e838f950b1c1fabc683030477e60b335649b8020e1076"}, + {file = "httptools-0.6.4-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:0e563e54979e97b6d13f1bbc05a96109923e76b901f786a5eae36e99c01237bd"}, + {file = "httptools-0.6.4-cp39-cp39-win_amd64.whl", hash = "sha256:b799de31416ecc589ad79dd85a0b2657a8fe39327944998dea368c1d4c9e55e6"}, + {file = 
"httptools-0.6.4.tar.gz", hash = "sha256:4e93eee4add6493b59a5c514da98c939b244fce4a0d8879cd3f466562f4b7d5c"}, ] [package.extras] @@ -719,72 +719,72 @@ six = ">=1.15,<2" [[package]] name = "markupsafe" -version = "3.0.1" +version = "3.0.2" description = "Safely add untrusted strings to HTML/XML markup." optional = false python-versions = ">=3.9" files = [ - {file = "MarkupSafe-3.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:db842712984e91707437461930e6011e60b39136c7331e971952bb30465bc1a1"}, - {file = "MarkupSafe-3.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3ffb4a8e7d46ed96ae48805746755fadd0909fea2306f93d5d8233ba23dda12a"}, - {file = "MarkupSafe-3.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:67c519635a4f64e495c50e3107d9b4075aec33634272b5db1cde839e07367589"}, - {file = "MarkupSafe-3.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48488d999ed50ba8d38c581d67e496f955821dc183883550a6fbc7f1aefdc170"}, - {file = "MarkupSafe-3.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f31ae06f1328595d762c9a2bf29dafd8621c7d3adc130cbb46278079758779ca"}, - {file = "MarkupSafe-3.0.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:80fcbf3add8790caddfab6764bde258b5d09aefbe9169c183f88a7410f0f6dea"}, - {file = "MarkupSafe-3.0.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:3341c043c37d78cc5ae6e3e305e988532b072329639007fd408a476642a89fd6"}, - {file = "MarkupSafe-3.0.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:cb53e2a99df28eee3b5f4fea166020d3ef9116fdc5764bc5117486e6d1211b25"}, - {file = "MarkupSafe-3.0.1-cp310-cp310-win32.whl", hash = "sha256:db15ce28e1e127a0013dfb8ac243a8e392db8c61eae113337536edb28bdc1f97"}, - {file = "MarkupSafe-3.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:4ffaaac913c3f7345579db4f33b0020db693f302ca5137f106060316761beea9"}, - {file = "MarkupSafe-3.0.1-cp311-cp311-macosx_10_9_universal2.whl", 
hash = "sha256:26627785a54a947f6d7336ce5963569b5d75614619e75193bdb4e06e21d447ad"}, - {file = "MarkupSafe-3.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b954093679d5750495725ea6f88409946d69cfb25ea7b4c846eef5044194f583"}, - {file = "MarkupSafe-3.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:973a371a55ce9ed333a3a0f8e0bcfae9e0d637711534bcb11e130af2ab9334e7"}, - {file = "MarkupSafe-3.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:244dbe463d5fb6d7ce161301a03a6fe744dac9072328ba9fc82289238582697b"}, - {file = "MarkupSafe-3.0.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d98e66a24497637dd31ccab090b34392dddb1f2f811c4b4cd80c230205c074a3"}, - {file = "MarkupSafe-3.0.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ad91738f14eb8da0ff82f2acd0098b6257621410dcbd4df20aaa5b4233d75a50"}, - {file = "MarkupSafe-3.0.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:7044312a928a66a4c2a22644147bc61a199c1709712069a344a3fb5cfcf16915"}, - {file = "MarkupSafe-3.0.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a4792d3b3a6dfafefdf8e937f14906a51bd27025a36f4b188728a73382231d91"}, - {file = "MarkupSafe-3.0.1-cp311-cp311-win32.whl", hash = "sha256:fa7d686ed9883f3d664d39d5a8e74d3c5f63e603c2e3ff0abcba23eac6542635"}, - {file = "MarkupSafe-3.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:9ba25a71ebf05b9bb0e2ae99f8bc08a07ee8e98c612175087112656ca0f5c8bf"}, - {file = "MarkupSafe-3.0.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:8ae369e84466aa70f3154ee23c1451fda10a8ee1b63923ce76667e3077f2b0c4"}, - {file = "MarkupSafe-3.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40f1e10d51c92859765522cbd79c5c8989f40f0419614bcdc5015e7b6bf97fc5"}, - {file = "MarkupSafe-3.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5a4cb365cb49b750bdb60b846b0c0bc49ed62e59a76635095a179d440540c346"}, - {file = 
"MarkupSafe-3.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ee3941769bd2522fe39222206f6dd97ae83c442a94c90f2b7a25d847d40f4729"}, - {file = "MarkupSafe-3.0.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62fada2c942702ef8952754abfc1a9f7658a4d5460fabe95ac7ec2cbe0d02abc"}, - {file = "MarkupSafe-3.0.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:4c2d64fdba74ad16138300815cfdc6ab2f4647e23ced81f59e940d7d4a1469d9"}, - {file = "MarkupSafe-3.0.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:fb532dd9900381d2e8f48172ddc5a59db4c445a11b9fab40b3b786da40d3b56b"}, - {file = "MarkupSafe-3.0.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:0f84af7e813784feb4d5e4ff7db633aba6c8ca64a833f61d8e4eade234ef0c38"}, - {file = "MarkupSafe-3.0.1-cp312-cp312-win32.whl", hash = "sha256:cbf445eb5628981a80f54087f9acdbf84f9b7d862756110d172993b9a5ae81aa"}, - {file = "MarkupSafe-3.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:a10860e00ded1dd0a65b83e717af28845bb7bd16d8ace40fe5531491de76b79f"}, - {file = "MarkupSafe-3.0.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:e81c52638315ff4ac1b533d427f50bc0afc746deb949210bc85f05d4f15fd772"}, - {file = "MarkupSafe-3.0.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:312387403cd40699ab91d50735ea7a507b788091c416dd007eac54434aee51da"}, - {file = "MarkupSafe-3.0.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2ae99f31f47d849758a687102afdd05bd3d3ff7dbab0a8f1587981b58a76152a"}, - {file = "MarkupSafe-3.0.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c97ff7fedf56d86bae92fa0a646ce1a0ec7509a7578e1ed238731ba13aabcd1c"}, - {file = "MarkupSafe-3.0.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a7420ceda262dbb4b8d839a4ec63d61c261e4e77677ed7c66c99f4e7cb5030dd"}, - {file = 
"MarkupSafe-3.0.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:45d42d132cff577c92bfba536aefcfea7e26efb975bd455db4e6602f5c9f45e7"}, - {file = "MarkupSafe-3.0.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:4c8817557d0de9349109acb38b9dd570b03cc5014e8aabf1cbddc6e81005becd"}, - {file = "MarkupSafe-3.0.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6a54c43d3ec4cf2a39f4387ad044221c66a376e58c0d0e971d47c475ba79c6b5"}, - {file = "MarkupSafe-3.0.1-cp313-cp313-win32.whl", hash = "sha256:c91b394f7601438ff79a4b93d16be92f216adb57d813a78be4446fe0f6bc2d8c"}, - {file = "MarkupSafe-3.0.1-cp313-cp313-win_amd64.whl", hash = "sha256:fe32482b37b4b00c7a52a07211b479653b7fe4f22b2e481b9a9b099d8a430f2f"}, - {file = "MarkupSafe-3.0.1-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:17b2aea42a7280db02ac644db1d634ad47dcc96faf38ab304fe26ba2680d359a"}, - {file = "MarkupSafe-3.0.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:852dc840f6d7c985603e60b5deaae1d89c56cb038b577f6b5b8c808c97580f1d"}, - {file = "MarkupSafe-3.0.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0778de17cff1acaeccc3ff30cd99a3fd5c50fc58ad3d6c0e0c4c58092b859396"}, - {file = "MarkupSafe-3.0.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:800100d45176652ded796134277ecb13640c1a537cad3b8b53da45aa96330453"}, - {file = "MarkupSafe-3.0.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d06b24c686a34c86c8c1fba923181eae6b10565e4d80bdd7bc1c8e2f11247aa4"}, - {file = "MarkupSafe-3.0.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:33d1c36b90e570ba7785dacd1faaf091203d9942bc036118fab8110a401eb1a8"}, - {file = "MarkupSafe-3.0.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:beeebf760a9c1f4c07ef6a53465e8cfa776ea6a2021eda0d0417ec41043fe984"}, - {file = "MarkupSafe-3.0.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = 
"sha256:bbde71a705f8e9e4c3e9e33db69341d040c827c7afa6789b14c6e16776074f5a"}, - {file = "MarkupSafe-3.0.1-cp313-cp313t-win32.whl", hash = "sha256:82b5dba6eb1bcc29cc305a18a3c5365d2af06ee71b123216416f7e20d2a84e5b"}, - {file = "MarkupSafe-3.0.1-cp313-cp313t-win_amd64.whl", hash = "sha256:730d86af59e0e43ce277bb83970530dd223bf7f2a838e086b50affa6ec5f9295"}, - {file = "MarkupSafe-3.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:4935dd7883f1d50e2ffecca0aa33dc1946a94c8f3fdafb8df5c330e48f71b132"}, - {file = "MarkupSafe-3.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e9393357f19954248b00bed7c56f29a25c930593a77630c719653d51e7669c2a"}, - {file = "MarkupSafe-3.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40621d60d0e58aa573b68ac5e2d6b20d44392878e0bfc159012a5787c4e35bc8"}, - {file = "MarkupSafe-3.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f94190df587738280d544971500b9cafc9b950d32efcb1fba9ac10d84e6aa4e6"}, - {file = "MarkupSafe-3.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b6a387d61fe41cdf7ea95b38e9af11cfb1a63499af2759444b99185c4ab33f5b"}, - {file = "MarkupSafe-3.0.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:8ad4ad1429cd4f315f32ef263c1342166695fad76c100c5d979c45d5570ed58b"}, - {file = "MarkupSafe-3.0.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:e24bfe89c6ac4c31792793ad9f861b8f6dc4546ac6dc8f1c9083c7c4f2b335cd"}, - {file = "MarkupSafe-3.0.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:2a4b34a8d14649315c4bc26bbfa352663eb51d146e35eef231dd739d54a5430a"}, - {file = "MarkupSafe-3.0.1-cp39-cp39-win32.whl", hash = "sha256:242d6860f1fd9191aef5fae22b51c5c19767f93fb9ead4d21924e0bcb17619d8"}, - {file = "MarkupSafe-3.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:93e8248d650e7e9d49e8251f883eed60ecbc0e8ffd6349e18550925e31bd029b"}, - {file = "markupsafe-3.0.1.tar.gz", hash = 
"sha256:3e683ee4f5d0fa2dde4db77ed8dd8a876686e3fc417655c2ece9a90576905344"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38a9ef736c01fccdd6600705b09dc574584b89bea478200c5fbf112a6b0d5579"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbcb445fa71794da8f178f0f6d66789a28d7319071af7a496d4d507ed566270d"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57cb5a3cf367aeb1d316576250f65edec5bb3be939e9247ae594b4bcbc317dfb"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3809ede931876f5b2ec92eef964286840ed3540dadf803dd570c3b7e13141a3b"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e07c3764494e3776c602c1e78e298937c3315ccc9043ead7e685b7f2b8d47b3c"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b424c77b206d63d500bcb69fa55ed8d0e6a3774056bdc4839fc9298a7edca171"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-win32.whl", hash = "sha256:fcabf5ff6eea076f859677f5f0b6b5c1a51e70a376b0579e0eadef8db48c6b50"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:6af100e168aa82a50e186c82875a5893c5597a0c1ccdb0d8b40240b1f28b969a"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93"}, + {file = 
"MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-win32.whl", hash = "sha256:6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8"}, + {file = 
"MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-win32.whl", hash = "sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash = 
"sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-win32.whl", hash = "sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-win32.whl", hash = "sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", 
hash = "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:eaa0a10b7f72326f1372a713e73c3f739b524b3af41feb43e4921cb529f5929a"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:48032821bbdf20f5799ff537c7ac3d1fba0ba032cfc06194faffa8cda8b560ff"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a9d3f5f0901fdec14d8d2f66ef7d035f2157240a433441719ac9a3fba440b13"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88b49a3b9ff31e19998750c38e030fc7bb937398b1f78cfa599aaef92d693144"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cfad01eed2c2e0c01fd0ecd2ef42c492f7f93902e39a42fc9ee1692961443a29"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:1225beacc926f536dc82e45f8a4d68502949dc67eea90eab715dea3a21c1b5f0"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:3169b1eefae027567d1ce6ee7cae382c57fe26e82775f460f0b2778beaad66c0"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:eb7972a85c54febfb25b5c4b4f3af4dcc731994c7da0d8a0b4a6eb0640e1d178"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-win32.whl", hash = "sha256:8c4e8c3ce11e1f92f6536ff07154f9d49677ebaaafc32db9db4620bc11ed480f"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:6e296a513ca3d94054c2c881cc913116e90fd030ad1c656b3869762b754f5f8a"}, + {file = "markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0"}, ] [[package]] @@ -1306,13 +1306,13 @@ files = [ [[package]] name = "starlette" -version = "0.40.0" +version = "0.41.2" description = "The little ASGI library that shines." 
optional = false python-versions = ">=3.8" files = [ - {file = "starlette-0.40.0-py3-none-any.whl", hash = "sha256:c494a22fae73805376ea6bf88439783ecfba9aac88a43911b48c653437e784c4"}, - {file = "starlette-0.40.0.tar.gz", hash = "sha256:1a3139688fb298ce5e2d661d37046a66ad996ce94be4d4983be019a23a04ea35"}, + {file = "starlette-0.41.2-py3-none-any.whl", hash = "sha256:fbc189474b4731cf30fcef52f18a8d070e3f3b46c6a04c97579e85e6ffca942d"}, + {file = "starlette-0.41.2.tar.gz", hash = "sha256:9834fd799d1a87fd346deb76158668cfa0b0d56f85caefe8268e2d97c3468b62"}, ] [package.dependencies] @@ -1334,13 +1334,13 @@ files = [ [[package]] name = "tqdm" -version = "4.66.5" +version = "4.66.6" description = "Fast, Extensible Progress Meter" optional = false python-versions = ">=3.7" files = [ - {file = "tqdm-4.66.5-py3-none-any.whl", hash = "sha256:90279a3770753eafc9194a0364852159802111925aa30eb3f9d85b0e805ac7cd"}, - {file = "tqdm-4.66.5.tar.gz", hash = "sha256:e1020aef2e5096702d8a025ac7d16b1577279c9d63f8375b63083e9a5f0fcbad"}, + {file = "tqdm-4.66.6-py3-none-any.whl", hash = "sha256:223e8b5359c2efc4b30555531f09e9f2f3589bcd7fdd389271191031b49b7a63"}, + {file = "tqdm-4.66.6.tar.gz", hash = "sha256:4bdd694238bef1485ce839d67967ab50af8f9272aab687c0d7702a01da0be090"}, ] [package.dependencies] @@ -1486,13 +1486,13 @@ test = ["aiohttp (>=3.10.5)", "flake8 (>=5.0,<6.0)", "mypy (>=0.800)", "psutil", [[package]] name = "virtualenv" -version = "20.26.6" +version = "20.27.1" description = "Virtual Python Environment builder" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "virtualenv-20.26.6-py3-none-any.whl", hash = "sha256:7345cc5b25405607a624d8418154577459c3e0277f5466dd79c49d5e492995f2"}, - {file = "virtualenv-20.26.6.tar.gz", hash = "sha256:280aede09a2a5c317e409a00102e7077c6432c5a38f0ef938e643805a7ad2c48"}, + {file = "virtualenv-20.27.1-py3-none-any.whl", hash = "sha256:f11f1b8a29525562925f745563bfd48b189450f61fb34c4f9cc79dd5aa32a1f4"}, + {file = 
"virtualenv-20.27.1.tar.gz", hash = "sha256:142c6be10212543b32c6c45d3d3893dff89112cc588b7d0879ae5a1ec03a47ba"}, ] [package.dependencies] @@ -1696,13 +1696,13 @@ files = [ [[package]] name = "werkzeug" -version = "3.0.4" +version = "3.0.6" description = "The comprehensive WSGI web application library." optional = false python-versions = ">=3.8" files = [ - {file = "werkzeug-3.0.4-py3-none-any.whl", hash = "sha256:02c9eb92b7d6c06f31a782811505d2157837cea66aaede3e217c7c27c039476c"}, - {file = "werkzeug-3.0.4.tar.gz", hash = "sha256:34f2371506b250df4d4f84bfe7b0921e4762525762bbd936614909fe25cd7306"}, + {file = "werkzeug-3.0.6-py3-none-any.whl", hash = "sha256:1bc0c2310d2fbb07b1dd1105eba2f7af72f322e1e455f2f93c993bee8c8a5f17"}, + {file = "werkzeug-3.0.6.tar.gz", hash = "sha256:a8dd59d4de28ca70471a34cba79bed5f7ef2e036a76b3ab0835474246eb41f8d"}, ] [package.dependencies] @@ -1714,4 +1714,4 @@ watchdog = ["watchdog (>=2.3)"] [metadata] lock-version = "2.0" python-versions = "^3.12" -content-hash = "98070c60b543155514af67ba4ecc2d9b4b1a31564d8987df51fa037ee1f6a038" +content-hash = "fc70531fe20fb5edd5c7132c1603ff5b20735df7f181120385a675fe4729599f" diff --git a/pyproject.toml b/pyproject.toml index 331c2ca..453f0fd 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -22,7 +22,7 @@ uvicorn = { extras = ["standard"], version = "^0.32.0" } markdownify = "^0.13.0" [tool.poetry.group.dev.dependencies] -djlint = "^1.35.2" +djlint = "^1.35.3" pre-commit = "^4.0.1" pytest = "^7.4.4" diff --git a/tests/test_blacklist.py b/tests/test_blacklist.py index e2b3c13..4639240 100644 --- a/tests/test_blacklist.py +++ b/tests/test_blacklist.py @@ -6,7 +6,7 @@ from typing import TYPE_CHECKING from reader import Entry, Feed, Reader, make_reader -from discord_rss_bot.filter.blacklist import has_black_tags, should_be_skipped +from discord_rss_bot.filter.blacklist import entry_should_be_skipped, feed_has_blacklist_tags if TYPE_CHECKING: from collections.abc import Iterable @@ -33,7 +33,8 @@ def 
test_has_black_tags() -> None: reader.update_feeds() # Test feed without any blacklist tags - assert has_black_tags(custom_reader=get_reader(), feed=feed) is False + assert_msg: str = "Feed should not have any blacklist tags" + assert feed_has_blacklist_tags(custom_reader=get_reader(), feed=feed) is False, assert_msg check_if_has_tag(reader, feed, "blacklist_title") check_if_has_tag(reader, feed, "blacklist_summary") @@ -45,9 +46,12 @@ def test_has_black_tags() -> None: def check_if_has_tag(reader: Reader, feed: Feed, blacklist_name: str) -> None: reader.set_tag(feed, blacklist_name, "a") # type: ignore - assert has_black_tags(custom_reader=reader, feed=feed) is True + assert_msg: str = f"Feed should have blacklist tags: {blacklist_name}" + assert feed_has_blacklist_tags(custom_reader=reader, feed=feed) is True, assert_msg + + assert_msg: str = f"Feed should not have any blacklist tags: {blacklist_name}" reader.delete_tag(feed, blacklist_name) - assert has_black_tags(custom_reader=reader, feed=feed) is False + assert feed_has_blacklist_tags(custom_reader=reader, feed=feed) is False, assert_msg def test_should_be_skipped() -> None: @@ -61,51 +65,51 @@ def test_should_be_skipped() -> None: # Get first entry first_entry: list[Entry] = [] entries: Iterable[Entry] = reader.get_entries(feed=feed) - assert entries is not None + assert entries is not None, f"Entries should not be None: {entries}" for entry in entries: first_entry.append(entry) break - assert len(first_entry) == 1 + assert len(first_entry) == 1, f"First entry should be added: {first_entry}" # Test entry without any blacklists - assert should_be_skipped(reader, first_entry[0]) is False + assert entry_should_be_skipped(reader, first_entry[0]) is False, f"Entry should not be skipped: {first_entry[0]}" reader.set_tag(feed, "blacklist_title", "fvnnnfnfdnfdnfd") # type: ignore - assert should_be_skipped(reader, first_entry[0]) is True + assert entry_should_be_skipped(reader, first_entry[0]) is True, f"Entry should 
be skipped: {first_entry[0]}" reader.delete_tag(feed, "blacklist_title") - assert should_be_skipped(reader, first_entry[0]) is False + assert entry_should_be_skipped(reader, first_entry[0]) is False, f"Entry should not be skipped: {first_entry[0]}" reader.set_tag(feed, "blacklist_title", "åäö") # type: ignore - assert should_be_skipped(reader, first_entry[0]) is False + assert entry_should_be_skipped(reader, first_entry[0]) is False, f"Entry should not be skipped: {first_entry[0]}" reader.delete_tag(feed, "blacklist_title") - assert should_be_skipped(reader, first_entry[0]) is False + assert entry_should_be_skipped(reader, first_entry[0]) is False, f"Entry should not be skipped: {first_entry[0]}" reader.set_tag(feed, "blacklist_summary", "ffdnfdnfdnfdnfdndfn") # type: ignore - assert should_be_skipped(reader, first_entry[0]) is True + assert entry_should_be_skipped(reader, first_entry[0]) is True, f"Entry should be skipped: {first_entry[0]}" reader.delete_tag(feed, "blacklist_summary") - assert should_be_skipped(reader, first_entry[0]) is False + assert entry_should_be_skipped(reader, first_entry[0]) is False, f"Entry should not be skipped: {first_entry[0]}" reader.set_tag(feed, "blacklist_summary", "åäö") # type: ignore - assert should_be_skipped(reader, first_entry[0]) is False + assert entry_should_be_skipped(reader, first_entry[0]) is False, f"Entry should not be skipped: {first_entry[0]}" reader.delete_tag(feed, "blacklist_summary") - assert should_be_skipped(reader, first_entry[0]) is False + assert entry_should_be_skipped(reader, first_entry[0]) is False, f"Entry should not be skipped: {first_entry[0]}" reader.set_tag(feed, "blacklist_content", "ffdnfdnfdnfdnfdndfn") # type: ignore - assert should_be_skipped(reader, first_entry[0]) is True + assert entry_should_be_skipped(reader, first_entry[0]) is True, f"Entry should be skipped: {first_entry[0]}" reader.delete_tag(feed, "blacklist_content") - assert should_be_skipped(reader, first_entry[0]) is False + 
assert entry_should_be_skipped(reader, first_entry[0]) is False, f"Entry should not be skipped: {first_entry[0]}" reader.set_tag(feed, "blacklist_content", "åäö") # type: ignore - assert should_be_skipped(reader, first_entry[0]) is False + assert entry_should_be_skipped(reader, first_entry[0]) is False, f"Entry should not be skipped: {first_entry[0]}" reader.delete_tag(feed, "blacklist_content") - assert should_be_skipped(reader, first_entry[0]) is False + assert entry_should_be_skipped(reader, first_entry[0]) is False, f"Entry should not be skipped: {first_entry[0]}" reader.set_tag(feed, "blacklist_author", "TheLovinator") # type: ignore - assert should_be_skipped(reader, first_entry[0]) is True + assert entry_should_be_skipped(reader, first_entry[0]) is True, f"Entry should be skipped: {first_entry[0]}" reader.delete_tag(feed, "blacklist_author") - assert should_be_skipped(reader, first_entry[0]) is False + assert entry_should_be_skipped(reader, first_entry[0]) is False, f"Entry should not be skipped: {first_entry[0]}" reader.set_tag(feed, "blacklist_author", "åäö") # type: ignore - assert should_be_skipped(reader, first_entry[0]) is False + assert entry_should_be_skipped(reader, first_entry[0]) is False, f"Entry should not be skipped: {first_entry[0]}" reader.delete_tag(feed, "blacklist_author") - assert should_be_skipped(reader, first_entry[0]) is False + assert entry_should_be_skipped(reader, first_entry[0]) is False, f"Entry should not be skipped: {first_entry[0]}" diff --git a/tests/test_custom_filter.py b/tests/test_custom_filter.py index 0dc7bdf..dd1fda1 100644 --- a/tests/test_custom_filter.py +++ b/tests/test_custom_filter.py @@ -12,18 +12,26 @@ if TYPE_CHECKING: def test_encode_url() -> None: # Test normal input - assert encode_url("https://www.example.com") == r"https%3A//www.example.com" + assert_msg: str = f"Got: {encode_url('https://www.example.com')}, Expected: https%3A//www.example.com" + assert encode_url("https://www.example.com") == 
r"https%3A//www.example.com", assert_msg + # Test input with spaces - assert encode_url("https://www.example.com/my path") == r"https%3A//www.example.com/my%20path" + assert_msg: str = ( + "Got: {encode_url('https://www.example.com/my path')}, Expected: https%3A//www.example.com/my%20path" + ) + assert encode_url("https://www.example.com/my path") == r"https%3A//www.example.com/my%20path", assert_msg + # Test input with special characters + assert_msg: str = f"Got: {encode_url('https://www.example.com/my path?q=abc&b=1')}, Expected: https%3A//www.example.com/my%20path%3Fq%3Dabc%26b%3D1" # noqa: E501 assert ( encode_url("https://www.example.com/my path?q=abc&b=1") == r"https%3A//www.example.com/my%20path%3Fq%3Dabc%26b%3D1" - ) + ), assert_msg + # Test empty input - assert not encode_url("") + assert not encode_url(""), "Got: True, Expected: False" # Test input as None - assert not encode_url(None) # type: ignore + assert not encode_url(None), "Got: True, Expected: False" def test_entry_is_whitelisted() -> None: @@ -43,7 +51,7 @@ def test_entry_is_whitelisted() -> None: custom_reader.set_tag("https://lovinator.space/rss_test.xml", "whitelist_title", "fvnnnfnfdnfdnfd") # type: ignore for entry in custom_reader.get_entries(): if entry_is_whitelisted(entry) is True: - assert entry.title == "fvnnnfnfdnfdnfd" + assert entry.title == "fvnnnfnfdnfdnfd", f"Expected: fvnnnfnfdnfdnfd, Got: {entry.title}" break custom_reader.delete_tag("https://lovinator.space/rss_test.xml", "whitelist_title") @@ -51,7 +59,7 @@ def test_entry_is_whitelisted() -> None: custom_reader.set_tag("https://lovinator.space/rss_test.xml", "whitelist_summary", "fvnnnfnfdnfdnfd") # type: ignore for entry in custom_reader.get_entries(): if entry_is_whitelisted(entry) is True: - assert entry.summary == "fvnnnfnfdnfdnfd" + assert entry.summary == "fvnnnfnfdnfdnfd", f"Expected: fvnnnfnfdnfdnfd, Got: {entry.summary}" break custom_reader.delete_tag("https://lovinator.space/rss_test.xml", "whitelist_summary") @@ 
-59,7 +67,8 @@ def test_entry_is_whitelisted() -> None: custom_reader.set_tag("https://lovinator.space/rss_test.xml", "whitelist_content", "fvnnnfnfdnfdnfd") # type: ignore for entry in custom_reader.get_entries(): if entry_is_whitelisted(entry) is True: - assert entry.content[0].value == "

ffdnfdnfdnfdnfdndfn

" + assert_msg = f"Expected:

ffdnfdnfdnfdnfdndfn

, Got: {entry.content[0].value}" + assert entry.content[0].value == "

ffdnfdnfdnfdnfdndfn

", assert_msg break custom_reader.delete_tag("https://lovinator.space/rss_test.xml", "whitelist_content") @@ -84,7 +93,7 @@ def test_entry_is_blacklisted() -> None: custom_reader.set_tag("https://lovinator.space/rss_test.xml", "blacklist_title", "fvnnnfnfdnfdnfd") # type: ignore for entry in custom_reader.get_entries(): if entry_is_blacklisted(entry) is True: - assert entry.title == "fvnnnfnfdnfdnfd" + assert entry.title == "fvnnnfnfdnfdnfd", f"Expected: fvnnnfnfdnfdnfd, Got: {entry.title}" break custom_reader.delete_tag("https://lovinator.space/rss_test.xml", "blacklist_title") @@ -92,7 +101,7 @@ def test_entry_is_blacklisted() -> None: custom_reader.set_tag("https://lovinator.space/rss_test.xml", "blacklist_summary", "fvnnnfnfdnfdnfd") # type: ignore for entry in custom_reader.get_entries(): if entry_is_blacklisted(entry) is True: - assert entry.summary == "fvnnnfnfdnfdnfd" + assert entry.summary == "fvnnnfnfdnfdnfd", f"Expected: fvnnnfnfdnfdnfd, Got: {entry.summary}" break custom_reader.delete_tag("https://lovinator.space/rss_test.xml", "blacklist_summary") @@ -100,7 +109,8 @@ def test_entry_is_blacklisted() -> None: custom_reader.set_tag("https://lovinator.space/rss_test.xml", "blacklist_content", "fvnnnfnfdnfdnfd") # type: ignore for entry in custom_reader.get_entries(): if entry_is_blacklisted(entry) is True: - assert entry.content[0].value == "

ffdnfdnfdnfdnfdndfn

" + assert_msg = f"Expected:

ffdnfdnfdnfdnfdndfn

, Got: {entry.content[0].value}" + assert entry.content[0].value == "

ffdnfdnfdnfdnfdndfn

", assert_msg break custom_reader.delete_tag("https://lovinator.space/rss_test.xml", "blacklist_content") diff --git a/tests/test_feeds.py b/tests/test_feeds.py index 11d40bf..09fe719 100644 --- a/tests/test_feeds.py +++ b/tests/test_feeds.py @@ -3,11 +3,12 @@ from __future__ import annotations import os import tempfile from pathlib import Path +from typing import LiteralString import pytest from reader import Feed, Reader, make_reader # type: ignore -from discord_rss_bot.feeds import send_to_discord +from discord_rss_bot.feeds import send_to_discord, truncate_webhook_message from discord_rss_bot.missing_tags import add_missing_tags @@ -16,11 +17,11 @@ def test_send_to_discord() -> None: with tempfile.TemporaryDirectory() as temp_dir: # Create the temp directory. Path.mkdir(Path(temp_dir), exist_ok=True) - assert Path.exists(Path(temp_dir)) + assert Path.exists(Path(temp_dir)), f"The directory '{temp_dir}' should exist." # Create a temporary reader. reader: Reader = make_reader(url=str(Path(temp_dir) / "test_db.sqlite")) - assert reader is not None + assert reader is not None, "The reader should not be None." # Add a feed to the reader. reader.add_feed("https://www.reddit.com/r/Python/.rss") @@ -32,7 +33,7 @@ def test_send_to_discord() -> None: # Get the feed. feed: Feed = reader.get_feed("https://www.reddit.com/r/Python/.rss") - assert feed is not None + assert feed is not None, f"The feed should not be None. Got: {feed}" # Get the webhook. webhook_url: str | None = os.environ.get("TEST_WEBHOOK_URL") @@ -41,14 +42,46 @@ def test_send_to_discord() -> None: reader.close() pytest.skip("No webhook URL provided.") - assert webhook_url is not None + assert webhook_url is not None, f"The webhook URL should not be None. Got: {webhook_url}" # Add tag to the feed and check if it is there. 
reader.set_tag(feed, "webhook", webhook_url) # type: ignore - assert reader.get_tag(feed, "webhook") == webhook_url # type: ignore + assert reader.get_tag(feed, "webhook") == webhook_url, f"The webhook URL should be '{webhook_url}'." # Send the feed to Discord. send_to_discord(custom_reader=reader, feed=feed, do_once=True) # Close the reader, so we can delete the directory. reader.close() + + +def test_truncate_webhook_message_short_message(): + message = "This is a short message." + assert_msg = "The message should remain unchanged if it's less than 4000 characters." + assert truncate_webhook_message(message) == message, assert_msg + + +def test_truncate_webhook_message_exact_length(): + message: LiteralString = "A" * 4000 # Exact length of max_content_length + assert_msg: str = f"The message should remain unchanged if it's exactly {4000} characters." + assert truncate_webhook_message(message) == message, assert_msg + + +def test_truncate_webhook_message_long_message(): + message: str = "A" * 4100 # Exceeds max_content_length + truncated_message: str = truncate_webhook_message(message) + + # Ensure the truncated message length is correct + assert_msg = "The length of the truncated message should be between 3999 and 4000." + assert 3999 <= len(truncated_message) <= 4000, assert_msg + + # Calculate half length for the truncated parts + half_length = (4000 - 3) // 2 + + # Test the beginning of the message + assert_msg = "The beginning of the truncated message should match the original message." + assert truncated_message[:half_length] == "A" * half_length, assert_msg + + # Test the end of the message + assert_msg = "The end of the truncated message should be '...' to indicate truncation." 
+ assert truncated_message[-half_length:] == "A" * half_length, assert_msg diff --git a/tests/test_main.py b/tests/test_main.py index 3360695..27eb214 100644 --- a/tests/test_main.py +++ b/tests/test_main.py @@ -1,8 +1,9 @@ -from typing import TYPE_CHECKING, Literal +import urllib.parse +from typing import TYPE_CHECKING from fastapi.testclient import TestClient -from discord_rss_bot.main import app, encode_url +from discord_rss_bot.main import app if TYPE_CHECKING: from httpx import Response @@ -11,7 +12,10 @@ client: TestClient = TestClient(app) webhook_name: str = "Hello, I am a webhook!" webhook_url: str = "https://discord.com/api/webhooks/1234567890/abcdefghijklmnopqrstuvwxyz" feed_url: str = "https://lovinator.space/rss_test.xml" -encoded_feed_url: str = encode_url(feed_url) + + +def encoded_feed_url(url: str) -> str: + return urllib.parse.quote(url) if url else "" def test_search() -> None: @@ -20,7 +24,7 @@ def test_search() -> None: feeds: Response = client.get("/") if feed_url in feeds.text: client.post(url="/remove", data={"feed_url": feed_url}) - client.post(url="/remove", data={"feed_url": encoded_feed_url}) + client.post(url="/remove", data={"feed_url": encoded_feed_url(feed_url)}) # Delete the webhook if it already exists before we run the test. response: Response = client.post(url="/delete_webhook", data={"webhook_url": webhook_url}) @@ -30,27 +34,20 @@ def test_search() -> None: url="/add_webhook", data={"webhook_name": webhook_name, "webhook_url": webhook_url}, ) - assert response.status_code == 200 + assert response.status_code == 200, f"Failed to add webhook: {response.text}" # Add the feed. response: Response = client.post(url="/add", data={"feed_url": feed_url, "webhook_dropdown": webhook_name}) - assert response.status_code == 200 + assert response.status_code == 200, f"Failed to add feed: {response.text}" # Check that the feed was added. 
response = client.get(url="/") - assert response.status_code == 200 - assert feed_url in response.text + assert response.status_code == 200, f"Failed to get /: {response.text}" + assert feed_url in response.text, f"Feed not found in /: {response.text}" # Search for an entry. response: Response = client.get(url="/search/?query=a") - assert response.status_code == 200 - - -def test_encode_url() -> None: - """Test the encode_url function.""" - before: Literal["https://www.google.com/"] = "https://www.google.com/" - after: Literal["https%3A//www.google.com/"] = "https%3A//www.google.com/" - assert encode_url(url_to_quote=before) == after + assert response.status_code == 200, f"Failed to search for entry: {response.text}" def test_add_webhook() -> None: @@ -63,12 +60,12 @@ def test_add_webhook() -> None: url="/add_webhook", data={"webhook_name": webhook_name, "webhook_url": webhook_url}, ) - assert response.status_code == 200 + assert response.status_code == 200, f"Failed to add webhook: {response.text}" # Check that the webhook was added. response = client.get(url="/webhooks") - assert response.status_code == 200 - assert webhook_name in response.text + assert response.status_code == 200, f"Failed to get /webhooks: {response.text}" + assert webhook_name in response.text, f"Webhook not found in /webhooks: {response.text}" def test_create_feed() -> None: @@ -77,16 +74,16 @@ def test_create_feed() -> None: feeds: Response = client.get(url="/") if feed_url in feeds.text: client.post(url="/remove", data={"feed_url": feed_url}) - client.post(url="/remove", data={"feed_url": encoded_feed_url}) + client.post(url="/remove", data={"feed_url": encoded_feed_url(feed_url)}) # Add the feed. response: Response = client.post(url="/add", data={"feed_url": feed_url, "webhook_dropdown": webhook_name}) - assert response.status_code == 200 + assert response.status_code == 200, f"Failed to add feed: {response.text}" # Check that the feed was added. 
response = client.get(url="/") - assert response.status_code == 200 - assert feed_url in response.text + assert response.status_code == 200, f"Failed to get /: {response.text}" + assert feed_url in response.text, f"Feed not found in /: {response.text}" def test_get() -> None: @@ -95,46 +92,43 @@ def test_get() -> None: feeds: Response = client.get("/") if feed_url in feeds.text: client.post(url="/remove", data={"feed_url": feed_url}) - client.post(url="/remove", data={"feed_url": encoded_feed_url}) + client.post(url="/remove", data={"feed_url": encoded_feed_url(feed_url)}) # Add the feed. response: Response = client.post(url="/add", data={"feed_url": feed_url, "webhook_dropdown": webhook_name}) - assert response.status_code == 200 + assert response.status_code == 200, f"Failed to add feed: {response.text}" # Check that the feed was added. response = client.get("/") - assert response.status_code == 200 - assert feed_url in response.text + assert response.status_code == 200, f"Failed to get /: {response.text}" + assert feed_url in response.text, f"Feed not found in /: {response.text}" response: Response = client.get(url="/add") - assert response.status_code == 200 + assert response.status_code == 200, f"/add failed: {response.text}" response: Response = client.get(url="/add_webhook") - assert response.status_code == 200 + assert response.status_code == 200, f"/add_webhook failed: {response.text}" - response: Response = client.get(url="/blacklist", params={"feed_url": encoded_feed_url}) - assert response.status_code == 200 + response: Response = client.get(url="/blacklist", params={"feed_url": encoded_feed_url(feed_url)}) + assert response.status_code == 200, f"/blacklist failed: {response.text}" - response: Response = client.get(url="/custom", params={"feed_url": encoded_feed_url}) - assert response.status_code == 200 + response: Response = client.get(url="/custom", params={"feed_url": encoded_feed_url(feed_url)}) + assert response.status_code == 200, f"/custom 
failed: {response.text}" - response: Response = client.get(url="/embed", params={"feed_url": encoded_feed_url}) - assert response.status_code == 200 + response: Response = client.get(url="/embed", params={"feed_url": encoded_feed_url(feed_url)}) + assert response.status_code == 200, f"/embed failed: {response.text}" - response: Response = client.get(url="/feed", params={"feed_url": encoded_feed_url}) - assert response.status_code == 200 - - response: Response = client.get(url="/feed_more", params={"feed_url": encoded_feed_url}) - assert response.status_code == 200 + response: Response = client.get(url="/feed", params={"feed_url": encoded_feed_url(feed_url)}) + assert response.status_code == 200, f"/feed failed: {response.text}" response: Response = client.get(url="/") - assert response.status_code == 200 + assert response.status_code == 200, f"/ failed: {response.text}" response: Response = client.get(url="/webhooks") - assert response.status_code == 200 + assert response.status_code == 200, f"/webhooks failed: {response.text}" - response: Response = client.get(url="/whitelist", params={"feed_url": encoded_feed_url}) - assert response.status_code == 200 + response: Response = client.get(url="/whitelist", params={"feed_url": encoded_feed_url(feed_url)}) + assert response.status_code == 200, f"/whitelist failed: {response.text}" def test_pause_feed() -> None: @@ -143,7 +137,7 @@ def test_pause_feed() -> None: feeds: Response = client.get(url="/") if feed_url in feeds.text: client.post(url="/remove", data={"feed_url": feed_url}) - client.post(url="/remove", data={"feed_url": encoded_feed_url}) + client.post(url="/remove", data={"feed_url": encoded_feed_url(feed_url)}) # Add the feed. 
response: Response = client.post(url="/add", data={"feed_url": feed_url, "webhook_dropdown": webhook_name}) @@ -152,16 +146,16 @@ def test_pause_feed() -> None: feeds: Response = client.get(url="/") if "Paused" in feeds.text: response: Response = client.post(url="/unpause", data={"feed_url": feed_url}) - assert response.status_code == 200 + assert response.status_code == 200, f"Failed to unpause feed: {response.text}" # Pause the feed. response: Response = client.post(url="/pause", data={"feed_url": feed_url}) - assert response.status_code == 200 + assert response.status_code == 200, f"Failed to pause feed: {response.text}" # Check that the feed was paused. response = client.get(url="/") - assert response.status_code == 200 - assert feed_url in response.text + assert response.status_code == 200, f"Failed to get /: {response.text}" + assert feed_url in response.text, f"Feed not found in /: {response.text}" def test_unpause_feed() -> None: @@ -170,7 +164,7 @@ def test_unpause_feed() -> None: feeds: Response = client.get("/") if feed_url in feeds.text: client.post(url="/remove", data={"feed_url": feed_url}) - client.post(url="/remove", data={"feed_url": encoded_feed_url}) + client.post(url="/remove", data={"feed_url": encoded_feed_url(feed_url)}) # Add the feed. response: Response = client.post(url="/add", data={"feed_url": feed_url, "webhook_dropdown": webhook_name}) @@ -179,16 +173,16 @@ def test_unpause_feed() -> None: feeds: Response = client.get(url="/") if "Paused" not in feeds.text: response: Response = client.post(url="/pause", data={"feed_url": feed_url}) - assert response.status_code == 200 + assert response.status_code == 200, f"Failed to pause feed: {response.text}" # Unpause the feed. response: Response = client.post(url="/unpause", data={"feed_url": feed_url}) - assert response.status_code == 200 + assert response.status_code == 200, f"Failed to unpause feed: {response.text}" # Check that the feed was unpaused. 
response = client.get(url="/") - assert response.status_code == 200 - assert feed_url in response.text + assert response.status_code == 200, f"Failed to get /: {response.text}" + assert feed_url in response.text, f"Feed not found in /: {response.text}" def test_remove_feed() -> None: @@ -197,19 +191,19 @@ def test_remove_feed() -> None: feeds: Response = client.get(url="/") if feed_url in feeds.text: client.post(url="/remove", data={"feed_url": feed_url}) - client.post(url="/remove", data={"feed_url": encoded_feed_url}) + client.post(url="/remove", data={"feed_url": encoded_feed_url(feed_url)}) # Add the feed. response: Response = client.post(url="/add", data={"feed_url": feed_url, "webhook_dropdown": webhook_name}) # Remove the feed. response: Response = client.post(url="/remove", data={"feed_url": feed_url}) - assert response.status_code == 200 + assert response.status_code == 200, f"Failed to remove feed: {response.text}" # Check that the feed was removed. response = client.get(url="/") - assert response.status_code == 200 - assert feed_url not in response.text + assert response.status_code == 200, f"Failed to get /: {response.text}" + assert feed_url not in response.text, f"Feed found in /: {response.text}" def test_delete_webhook() -> None: @@ -227,9 +221,9 @@ def test_delete_webhook() -> None: # Delete the webhook. response: Response = client.post(url="/delete_webhook", data={"webhook_url": webhook_url}) - assert response.status_code == 200 + assert response.status_code == 200, f"Failed to delete webhook: {response.text}" # Check that the webhook was added. 
response = client.get(url="/webhooks") - assert response.status_code == 200 - assert webhook_name not in response.text + assert response.status_code == 200, f"Failed to get /webhooks: {response.text}" + assert webhook_name not in response.text, f"Webhook found in /webhooks: {response.text}" diff --git a/tests/test_search.py b/tests/test_search.py index 3b6a577..18a8f15 100644 --- a/tests/test_search.py +++ b/tests/test_search.py @@ -16,26 +16,26 @@ def test_create_html_for_search_results() -> None: with tempfile.TemporaryDirectory() as temp_dir: # Create the temp directory. Path.mkdir(Path(temp_dir), exist_ok=True) - assert Path.exists(Path(temp_dir)) + assert Path.exists(Path(temp_dir)), f"The directory '{temp_dir}' should exist." # Create a temporary reader. reader: Reader = make_reader(url=str(Path(temp_dir, "test_db.sqlite"))) - assert reader is not None + assert reader is not None, "The reader should not be None." # Add a feed to the reader. reader.add_feed("https://lovinator.space/rss_test.xml", exist_ok=True) # Check that the feed was added. feeds: Iterable[Feed] = reader.get_feeds() - assert feeds is not None - assert len(list(feeds)) == 1 + assert feeds is not None, f"The feeds should not be None. Got: {feeds}" + assert len(list(feeds)) == 1, f"The number of feeds should be 1. Got: {len(list(feeds))}" # Update the feed to get the entries. reader.update_feeds() # Get the feed. feed: Feed = reader.get_feed("https://lovinator.space/rss_test.xml") - assert feed is not None + assert feed is not None, f"The feed should not be None. Got: {feed}" # Update the search index. reader.enable_search() @@ -43,8 +43,8 @@ def test_create_html_for_search_results() -> None: # Create the HTML and check if it is not empty. search_html: str = create_html_for_search_results("a", reader) - assert search_html is not None - assert len(search_html) > 10 + assert search_html is not None, f"The search HTML should not be None. 
Got: {search_html}" + assert len(search_html) > 10, f"The search HTML should be longer than 10 characters. Got: {len(search_html)}" # Close the reader, so we can delete the directory. reader.close() diff --git a/tests/test_settings.py b/tests/test_settings.py index 51cc1fc..e49a0b7 100644 --- a/tests/test_settings.py +++ b/tests/test_settings.py @@ -10,7 +10,7 @@ from discord_rss_bot.settings import data_dir, default_custom_message, get_reade def test_reader() -> None: """Test the reader.""" reader: Reader = get_reader() - assert isinstance(reader, Reader) + assert isinstance(reader, Reader), f"The reader should be an instance of Reader. But it was '{type(reader)}'." # Test the reader with a custom location. with tempfile.TemporaryDirectory() as temp_dir: @@ -19,7 +19,8 @@ def test_reader() -> None: custom_loc: pathlib.Path = pathlib.Path(temp_dir, "custom_loc_db.sqlite") custom_reader: Reader = get_reader(custom_location=str(custom_loc)) - assert isinstance(custom_reader, Reader) + assert_msg = f"The custom reader should be an instance of Reader. But it was '{type(custom_reader)}'." + assert isinstance(custom_reader, Reader), assert_msg # Close the reader, so we can delete the directory. custom_reader.close() @@ -27,12 +28,13 @@ def test_reader() -> None: def test_data_dir() -> None: """Test the data directory.""" - assert Path.exists(Path(data_dir)) + assert Path.exists(Path(data_dir)), f"The data directory '{data_dir}' should exist." def test_default_custom_message() -> None: """Test the default custom message.""" - assert default_custom_message == "{{entry_title}}\n{{entry_link}}" + assert_msg = f"The default custom message should be '{{entry_title}}\n{{entry_link}}'. But it was '{default_custom_message}'." # noqa: E501 + assert default_custom_message == "{{entry_title}}\n{{entry_link}}", assert_msg def test_get_webhook_for_entry() -> None: @@ -52,7 +54,7 @@ def test_get_webhook_for_entry() -> None: # Add a webhook to the database. 
custom_reader.set_tag("https://www.reddit.com/r/movies.rss", "webhook", "https://example.com") # type: ignore our_tag: str = custom_reader.get_tag("https://www.reddit.com/r/movies.rss", "webhook") # type: ignore - assert our_tag == "https://example.com" + assert our_tag == "https://example.com", f"The tag should be 'https://example.com'. But it was '{our_tag}'." # Close the reader, so we can delete the directory. custom_reader.close() diff --git a/tests/test_utils.py b/tests/test_utils.py index f31dc89..4c5d2ca 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -2,11 +2,13 @@ from discord_rss_bot.filter.utils import is_word_in_text def test_is_word_in_text() -> None: - assert is_word_in_text("word1,word2", "This is a sample text containing word1 and word2.") is True - assert is_word_in_text("word1,word2", "This is a sample text containing word1.") is True - assert is_word_in_text("word1,word2", "This is a sample text containing word2.") is True - assert is_word_in_text("word1,word2", "This is a sample text containing WORD1 and WORD2.") is True - assert is_word_in_text("Alert,Forma", "Outbreak - Mutagen Mass - Rhea (Saturn)") is False + msg_true = "Should return True" + msg_false = "Should return False" - assert is_word_in_text("Alert,Forma", "Outbreak - Mutagen Mass - Rhea (Saturn)") is False - assert is_word_in_text("word1,word2", "This is a sample text containing none of the words.") is False + assert is_word_in_text("word1,word2", "This is a sample text containing word1 and word2.") is True, msg_true + assert is_word_in_text("word1,word2", "This is a sample text containing word1.") is True, msg_true + assert is_word_in_text("word1,word2", "This is a sample text containing word2.") is True, msg_true + assert is_word_in_text("word1,word2", "This is a sample text containing WORD1 and WORD2.") is True, msg_true + assert is_word_in_text("Alert,Forma", "Outbreak - Mutagen Mass - Rhea (Saturn)") is False, msg_false + assert is_word_in_text("Alert,Forma", 
"Outbreak - Mutagen Mass - Rhea (Saturn)") is False, msg_false + assert is_word_in_text("word1,word2", "This is a sample text containing none of the words.") is False, msg_false diff --git a/tests/test_whitelist.py b/tests/test_whitelist.py index 4e52eef..47ecc46 100644 --- a/tests/test_whitelist.py +++ b/tests/test_whitelist.py @@ -33,7 +33,7 @@ def test_has_white_tags() -> None: reader.update_feeds() # Test feed without any whitelist tags - assert has_white_tags(custom_reader=get_reader(), feed=feed) is False + assert has_white_tags(custom_reader=get_reader(), feed=feed) is False, "Feed should not have any whitelist tags" check_if_has_tag(reader, feed, "whitelist_title") check_if_has_tag(reader, feed, "whitelist_summary") @@ -45,9 +45,9 @@ def test_has_white_tags() -> None: def check_if_has_tag(reader: Reader, feed: Feed, whitelist_name: str) -> None: reader.set_tag(feed, whitelist_name, "a") # type: ignore - assert has_white_tags(custom_reader=reader, feed=feed) is True + assert has_white_tags(custom_reader=reader, feed=feed) is True, "Feed should have whitelist tags" reader.delete_tag(feed, whitelist_name) - assert has_white_tags(custom_reader=reader, feed=feed) is False + assert has_white_tags(custom_reader=reader, feed=feed) is False, "Feed should not have any whitelist tags" def test_should_be_sent() -> None: @@ -61,51 +61,51 @@ def test_should_be_sent() -> None: # Get first entry first_entry: list[Entry] = [] entries: Iterable[Entry] = reader.get_entries(feed=feed) - assert entries is not None + assert entries is not None, "Entries should not be None" for entry in entries: first_entry.append(entry) break - assert len(first_entry) == 1 + assert len(first_entry) == 1, "First entry should be added" # Test entry without any whitelists - assert should_be_sent(reader, first_entry[0]) is False + assert should_be_sent(reader, first_entry[0]) is False, "Entry should not be sent" reader.set_tag(feed, "whitelist_title", "fvnnnfnfdnfdnfd") # type: ignore - assert 
should_be_sent(reader, first_entry[0]) is True + assert should_be_sent(reader, first_entry[0]) is True, "Entry should be sent" reader.delete_tag(feed, "whitelist_title") - assert should_be_sent(reader, first_entry[0]) is False + assert should_be_sent(reader, first_entry[0]) is False, "Entry should not be sent" reader.set_tag(feed, "whitelist_title", "åäö") # type: ignore - assert should_be_sent(reader, first_entry[0]) is False + assert should_be_sent(reader, first_entry[0]) is False, "Entry should not be sent" reader.delete_tag(feed, "whitelist_title") - assert should_be_sent(reader, first_entry[0]) is False + assert should_be_sent(reader, first_entry[0]) is False, "Entry should not be sent" reader.set_tag(feed, "whitelist_summary", "ffdnfdnfdnfdnfdndfn") # type: ignore - assert should_be_sent(reader, first_entry[0]) is True + assert should_be_sent(reader, first_entry[0]) is True, "Entry should be sent" reader.delete_tag(feed, "whitelist_summary") - assert should_be_sent(reader, first_entry[0]) is False + assert should_be_sent(reader, first_entry[0]) is False, "Entry should not be sent" reader.set_tag(feed, "whitelist_summary", "åäö") # type: ignore - assert should_be_sent(reader, first_entry[0]) is False + assert should_be_sent(reader, first_entry[0]) is False, "Entry should not be sent" reader.delete_tag(feed, "whitelist_summary") - assert should_be_sent(reader, first_entry[0]) is False + assert should_be_sent(reader, first_entry[0]) is False, "Entry should not be sent" reader.set_tag(feed, "whitelist_content", "ffdnfdnfdnfdnfdndfn") # type: ignore - assert should_be_sent(reader, first_entry[0]) is True + assert should_be_sent(reader, first_entry[0]) is True, "Entry should be sent" reader.delete_tag(feed, "whitelist_content") - assert should_be_sent(reader, first_entry[0]) is False + assert should_be_sent(reader, first_entry[0]) is False, "Entry should not be sent" reader.set_tag(feed, "whitelist_content", "åäö") # type: ignore - assert should_be_sent(reader, 
first_entry[0]) is False + assert should_be_sent(reader, first_entry[0]) is False, "Entry should not be sent" reader.delete_tag(feed, "whitelist_content") - assert should_be_sent(reader, first_entry[0]) is False + assert should_be_sent(reader, first_entry[0]) is False, "Entry should not be sent" reader.set_tag(feed, "whitelist_author", "TheLovinator") # type: ignore - assert should_be_sent(reader, first_entry[0]) is True + assert should_be_sent(reader, first_entry[0]) is True, "Entry should be sent" reader.delete_tag(feed, "whitelist_author") - assert should_be_sent(reader, first_entry[0]) is False + assert should_be_sent(reader, first_entry[0]) is False, "Entry should not be sent" reader.set_tag(feed, "whitelist_author", "åäö") # type: ignore - assert should_be_sent(reader, first_entry[0]) is False + assert should_be_sent(reader, first_entry[0]) is False, "Entry should not be sent" reader.delete_tag(feed, "whitelist_author") - assert should_be_sent(reader, first_entry[0]) is False + assert should_be_sent(reader, first_entry[0]) is False, "Entry should not be sent"