Stuff and things
@@ -6,7 +6,7 @@ from typing import TYPE_CHECKING

 from reader import Entry, Feed, Reader, make_reader

-from discord_rss_bot.filter.blacklist import has_black_tags, should_be_skipped
+from discord_rss_bot.filter.blacklist import entry_should_be_skipped, feed_has_blacklist_tags

 if TYPE_CHECKING:
     from collections.abc import Iterable
@@ -33,7 +33,8 @@ def test_has_black_tags() -> None:
     reader.update_feeds()

     # Test feed without any blacklist tags
-    assert has_black_tags(custom_reader=get_reader(), feed=feed) is False
+    assert_msg: str = "Feed should not have any blacklist tags"
+    assert feed_has_blacklist_tags(custom_reader=get_reader(), feed=feed) is False, assert_msg

     check_if_has_tag(reader, feed, "blacklist_title")
     check_if_has_tag(reader, feed, "blacklist_summary")
@@ -45,9 +46,12 @@ def test_has_black_tags() -> None:

 def check_if_has_tag(reader: Reader, feed: Feed, blacklist_name: str) -> None:
     reader.set_tag(feed, blacklist_name, "a") # type: ignore
-    assert has_black_tags(custom_reader=reader, feed=feed) is True
+    assert_msg: str = f"Feed should have blacklist tags: {blacklist_name}"
+    assert feed_has_blacklist_tags(custom_reader=reader, feed=feed) is True, assert_msg
+
+    asset_msg: str = f"Feed should not have any blacklist tags: {blacklist_name}"
     reader.delete_tag(feed, blacklist_name)
-    assert has_black_tags(custom_reader=reader, feed=feed) is False
+    assert feed_has_blacklist_tags(custom_reader=reader, feed=feed) is False, asset_msg


 def test_should_be_skipped() -> None:
@@ -61,51 +65,51 @@ def test_should_be_skipped() -> None:
     # Get first entry
     first_entry: list[Entry] = []
     entries: Iterable[Entry] = reader.get_entries(feed=feed)
-    assert entries is not None
+    assert entries is not None, f"Entries should not be None: {entries}"
     for entry in entries:
         first_entry.append(entry)
         break
-    assert len(first_entry) == 1
+    assert len(first_entry) == 1, f"First entry should be added: {first_entry}"

     # Test entry without any blacklists
-    assert should_be_skipped(reader, first_entry[0]) is False
+    assert entry_should_be_skipped(reader, first_entry[0]) is False, f"Entry should not be skipped: {first_entry[0]}"

     reader.set_tag(feed, "blacklist_title", "fvnnnfnfdnfdnfd") # type: ignore
-    assert should_be_skipped(reader, first_entry[0]) is True
+    assert entry_should_be_skipped(reader, first_entry[0]) is True, f"Entry should be skipped: {first_entry[0]}"
     reader.delete_tag(feed, "blacklist_title")
-    assert should_be_skipped(reader, first_entry[0]) is False
+    assert entry_should_be_skipped(reader, first_entry[0]) is False, f"Entry should not be skipped: {first_entry[0]}"

     reader.set_tag(feed, "blacklist_title", "åäö") # type: ignore
-    assert should_be_skipped(reader, first_entry[0]) is False
+    assert entry_should_be_skipped(reader, first_entry[0]) is False, f"Entry should not be skipped: {first_entry[0]}"
     reader.delete_tag(feed, "blacklist_title")
-    assert should_be_skipped(reader, first_entry[0]) is False
+    assert entry_should_be_skipped(reader, first_entry[0]) is False, f"Entry should not be skipped: {first_entry[0]}"

     reader.set_tag(feed, "blacklist_summary", "ffdnfdnfdnfdnfdndfn") # type: ignore
-    assert should_be_skipped(reader, first_entry[0]) is True
+    assert entry_should_be_skipped(reader, first_entry[0]) is True, f"Entry should be skipped: {first_entry[0]}"
     reader.delete_tag(feed, "blacklist_summary")
-    assert should_be_skipped(reader, first_entry[0]) is False
+    assert entry_should_be_skipped(reader, first_entry[0]) is False, f"Entry should not be skipped: {first_entry[0]}"

     reader.set_tag(feed, "blacklist_summary", "åäö") # type: ignore
-    assert should_be_skipped(reader, first_entry[0]) is False
+    assert entry_should_be_skipped(reader, first_entry[0]) is False, f"Entry should not be skipped: {first_entry[0]}"
     reader.delete_tag(feed, "blacklist_summary")
-    assert should_be_skipped(reader, first_entry[0]) is False
+    assert entry_should_be_skipped(reader, first_entry[0]) is False, f"Entry should not be skipped: {first_entry[0]}"

     reader.set_tag(feed, "blacklist_content", "ffdnfdnfdnfdnfdndfn") # type: ignore
-    assert should_be_skipped(reader, first_entry[0]) is True
+    assert entry_should_be_skipped(reader, first_entry[0]) is True, f"Entry should be skipped: {first_entry[0]}"
     reader.delete_tag(feed, "blacklist_content")
-    assert should_be_skipped(reader, first_entry[0]) is False
+    assert entry_should_be_skipped(reader, first_entry[0]) is False, f"Entry should not be skipped: {first_entry[0]}"

     reader.set_tag(feed, "blacklist_content", "åäö") # type: ignore
-    assert should_be_skipped(reader, first_entry[0]) is False
+    assert entry_should_be_skipped(reader, first_entry[0]) is False, f"Entry should not be skipped: {first_entry[0]}"
     reader.delete_tag(feed, "blacklist_content")
-    assert should_be_skipped(reader, first_entry[0]) is False
+    assert entry_should_be_skipped(reader, first_entry[0]) is False, f"Entry should not be skipped: {first_entry[0]}"

     reader.set_tag(feed, "blacklist_author", "TheLovinator") # type: ignore
-    assert should_be_skipped(reader, first_entry[0]) is True
+    assert entry_should_be_skipped(reader, first_entry[0]) is True, f"Entry should be skipped: {first_entry[0]}"
     reader.delete_tag(feed, "blacklist_author")
-    assert should_be_skipped(reader, first_entry[0]) is False
+    assert entry_should_be_skipped(reader, first_entry[0]) is False, f"Entry should not be skipped: {first_entry[0]}"

     reader.set_tag(feed, "blacklist_author", "åäö") # type: ignore
-    assert should_be_skipped(reader, first_entry[0]) is False
+    assert entry_should_be_skipped(reader, first_entry[0]) is False, f"Entry should not be skipped: {first_entry[0]}"
     reader.delete_tag(feed, "blacklist_author")
-    assert should_be_skipped(reader, first_entry[0]) is False
+    assert entry_should_be_skipped(reader, first_entry[0]) is False, f"Entry should not be skipped: {first_entry[0]}"
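The diff above only shows the call sites of the renamed helpers. As orientation, here is a minimal sketch of how feed_has_blacklist_tags and entry_should_be_skipped presumably behave, assuming the reader tag API (get_tags) and the project's is_word_in_text helper; this is an editor's sketch, not the actual implementation in discord_rss_bot/filter/blacklist.py.

# Hypothetical sketch only -- the real functions live in discord_rss_bot/filter/blacklist.py.
from reader import Entry, Feed, Reader

from discord_rss_bot.filter.utils import is_word_in_text


def feed_has_blacklist_tags(custom_reader: Reader, feed: Feed) -> bool:
    # A feed counts as blacklisted if any blacklist_* tag is set and non-empty.
    keys = ("blacklist_title", "blacklist_summary", "blacklist_content", "blacklist_author")
    tags = dict(custom_reader.get_tags(feed))
    return any(tags.get(key) for key in keys)


def entry_should_be_skipped(custom_reader: Reader, entry: Entry) -> bool:
    # Skip the entry if any blacklist tag on its feed matches the corresponding field.
    tags = dict(custom_reader.get_tags(entry.feed))
    checks = (
        (str(tags.get("blacklist_title", "")), entry.title or ""),
        (str(tags.get("blacklist_summary", "")), entry.summary or ""),
        (str(tags.get("blacklist_author", "")), entry.author or ""),
    )
    return any(words and is_word_in_text(words, text) for words, text in checks)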
@@ -12,18 +12,26 @@ if TYPE_CHECKING:

 def test_encode_url() -> None:
     # Test normal input
-    assert encode_url("https://www.example.com") == r"https%3A//www.example.com"
+    assert_msg: str = "Got: {encode_url('https://www.example.com')}, Expected: https%3A//www.example.com"
+    assert encode_url("https://www.example.com") == r"https%3A//www.example.com", assert_msg

     # Test input with spaces
-    assert encode_url("https://www.example.com/my path") == r"https%3A//www.example.com/my%20path"
+    assert_msg: str = (
+        "Got: {encode_url('https://www.example.com/my path')}, Expected: https%3A//www.example.com/my%20path"
+    )
+    assert encode_url("https://www.example.com/my path") == r"https%3A//www.example.com/my%20path", assert_msg

     # Test input with special characters
+    assert_msg: str = f"Got: {encode_url('https://www.example.com/my path?q=abc&b=1')}, Expected: https%3A//www.example.com/my%20path%3Fq%3Dabc%26b%3D1" # noqa: E501
     assert (
         encode_url("https://www.example.com/my path?q=abc&b=1")
         == r"https%3A//www.example.com/my%20path%3Fq%3Dabc%26b%3D1"
-    )
+    ), assert_msg

     # Test empty input
-    assert not encode_url("")
+    assert not encode_url(""), "Got: True, Expected: False"
     # Test input as None
-    assert not encode_url(None) # type: ignore
+    assert not encode_url(None), "Got: True, Expected: False"


 def test_entry_is_whitelisted() -> None:
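The assertions above pin down encode_url's observable behaviour: ':' becomes %3A, spaces become %20, '?', '=' and '&' are percent-encoded, '/' is left alone, and empty or None input yields a falsy value. A minimal sketch that satisfies them, assuming urllib.parse.quote underneath (the real helper imported from discord_rss_bot.main may differ):

# Sketch only -- the real encode_url lives in the project and may differ.
import urllib.parse


def encode_url(url_to_quote: str) -> str:
    # quote() leaves "/" unescaped by default, so ":" -> %3A, " " -> %20, "?", "=", "&" are encoded.
    return urllib.parse.quote(url_to_quote) if url_to_quote else ""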
@@ -43,7 +51,7 @@ def test_entry_is_whitelisted() -> None:
     custom_reader.set_tag("https://lovinator.space/rss_test.xml", "whitelist_title", "fvnnnfnfdnfdnfd") # type: ignore
     for entry in custom_reader.get_entries():
         if entry_is_whitelisted(entry) is True:
-            assert entry.title == "fvnnnfnfdnfdnfd"
+            assert entry.title == "fvnnnfnfdnfdnfd", f"Expected: fvnnnfnfdnfdnfd, Got: {entry.title}"
             break
     custom_reader.delete_tag("https://lovinator.space/rss_test.xml", "whitelist_title")

@@ -51,7 +59,7 @@ def test_entry_is_whitelisted() -> None:
     custom_reader.set_tag("https://lovinator.space/rss_test.xml", "whitelist_summary", "fvnnnfnfdnfdnfd") # type: ignore
     for entry in custom_reader.get_entries():
         if entry_is_whitelisted(entry) is True:
-            assert entry.summary == "fvnnnfnfdnfdnfd"
+            assert entry.summary == "fvnnnfnfdnfdnfd", f"Expected: fvnnnfnfdnfdnfd, Got: {entry.summary}"
             break
     custom_reader.delete_tag("https://lovinator.space/rss_test.xml", "whitelist_summary")

@@ -59,7 +67,8 @@ def test_entry_is_whitelisted() -> None:
     custom_reader.set_tag("https://lovinator.space/rss_test.xml", "whitelist_content", "fvnnnfnfdnfdnfd") # type: ignore
     for entry in custom_reader.get_entries():
         if entry_is_whitelisted(entry) is True:
-            assert entry.content[0].value == "<p>ffdnfdnfdnfdnfdndfn</p>"
+            assert_msg = f"Expected: <p>ffdnfdnfdnfdnfdndfn</p>, Got: {entry.content[0].value}"
+            assert entry.content[0].value == "<p>ffdnfdnfdnfdnfdndfn</p>", assert_msg
             break
     custom_reader.delete_tag("https://lovinator.space/rss_test.xml", "whitelist_content")

@@ -84,7 +93,7 @@ def test_entry_is_blacklisted() -> None:
     custom_reader.set_tag("https://lovinator.space/rss_test.xml", "blacklist_title", "fvnnnfnfdnfdnfd") # type: ignore
     for entry in custom_reader.get_entries():
         if entry_is_blacklisted(entry) is True:
-            assert entry.title == "fvnnnfnfdnfdnfd"
+            assert entry.title == "fvnnnfnfdnfdnfd", f"Expected: fvnnnfnfdnfdnfd, Got: {entry.title}"
             break
     custom_reader.delete_tag("https://lovinator.space/rss_test.xml", "blacklist_title")

@@ -92,7 +101,7 @@ def test_entry_is_blacklisted() -> None:
     custom_reader.set_tag("https://lovinator.space/rss_test.xml", "blacklist_summary", "fvnnnfnfdnfdnfd") # type: ignore
     for entry in custom_reader.get_entries():
         if entry_is_blacklisted(entry) is True:
-            assert entry.summary == "fvnnnfnfdnfdnfd"
+            assert entry.summary == "fvnnnfnfdnfdnfd", f"Expected: fvnnnfnfdnfdnfd, Got: {entry.summary}"
             break
     custom_reader.delete_tag("https://lovinator.space/rss_test.xml", "blacklist_summary")

@@ -100,7 +109,8 @@ def test_entry_is_blacklisted() -> None:
     custom_reader.set_tag("https://lovinator.space/rss_test.xml", "blacklist_content", "fvnnnfnfdnfdnfd") # type: ignore
     for entry in custom_reader.get_entries():
         if entry_is_blacklisted(entry) is True:
-            assert entry.content[0].value == "<p>ffdnfdnfdnfdnfdndfn</p>"
+            assert_msg = f"Expected: <p>ffdnfdnfdnfdnfdndfn</p>, Got: {entry.content[0].value}"
+            assert entry.content[0].value == "<p>ffdnfdnfdnfdnfdndfn</p>", assert_msg
             break
     custom_reader.delete_tag("https://lovinator.space/rss_test.xml", "blacklist_content")

@@ -3,11 +3,12 @@ from __future__ import annotations
 import os
 import tempfile
 from pathlib import Path
+from typing import LiteralString

 import pytest
 from reader import Feed, Reader, make_reader # type: ignore

-from discord_rss_bot.feeds import send_to_discord
+from discord_rss_bot.feeds import send_to_discord, truncate_webhook_message
 from discord_rss_bot.missing_tags import add_missing_tags


@@ -16,11 +17,11 @@ def test_send_to_discord() -> None:
     with tempfile.TemporaryDirectory() as temp_dir:
         # Create the temp directory.
         Path.mkdir(Path(temp_dir), exist_ok=True)
-        assert Path.exists(Path(temp_dir))
+        assert Path.exists(Path(temp_dir)), f"The directory '{temp_dir}' should exist."

         # Create a temporary reader.
         reader: Reader = make_reader(url=str(Path(temp_dir) / "test_db.sqlite"))
-        assert reader is not None
+        assert reader is not None, "The reader should not be None."

         # Add a feed to the reader.
         reader.add_feed("https://www.reddit.com/r/Python/.rss")
@@ -32,7 +33,7 @@ def test_send_to_discord() -> None:

         # Get the feed.
         feed: Feed = reader.get_feed("https://www.reddit.com/r/Python/.rss")
-        assert feed is not None
+        assert feed is not None, f"The feed should not be None. Got: {feed}"

         # Get the webhook.
         webhook_url: str | None = os.environ.get("TEST_WEBHOOK_URL")
@@ -41,14 +42,46 @@ def test_send_to_discord() -> None:
             reader.close()
             pytest.skip("No webhook URL provided.")

-        assert webhook_url is not None
+        assert webhook_url is not None, f"The webhook URL should not be None. Got: {webhook_url}"

         # Add tag to the feed and check if it is there.
         reader.set_tag(feed, "webhook", webhook_url) # type: ignore
-        assert reader.get_tag(feed, "webhook") == webhook_url # type: ignore
+        assert reader.get_tag(feed, "webhook") == webhook_url, f"The webhook URL should be '{webhook_url}'."

         # Send the feed to Discord.
         send_to_discord(custom_reader=reader, feed=feed, do_once=True)

         # Close the reader, so we can delete the directory.
         reader.close()
+
+
+def test_truncate_webhook_message_short_message():
+    message = "This is a short message."
+    assert_msg = "The message should remain unchanged if it's less than 4000 characters."
+    assert truncate_webhook_message(message) == message, assert_msg
+
+
+def test_truncate_webhook_message_exact_length():
+    message: LiteralString = "A" * 4000 # Exact length of max_content_length
+    assert_msg: str = f"The message should remain unchanged if it's exactly {4000} characters."
+    assert truncate_webhook_message(message) == message, assert_msg
+
+
+def test_truncate_webhook_message_long_message():
+    message: str = "A" * 4100 # Exceeds max_content_length
+    truncated_message: str = truncate_webhook_message(message)
+
+    # Ensure the truncated message length is correct
+    assert_msg = "The length of the truncated message should be between 3999 and 4000."
+    assert 3999 <= len(truncated_message) <= 4000, assert_msg
+
+    # Calculate half length for the truncated parts
+    half_length = (4000 - 3) // 2
+
+    # Test the beginning of the message
+    assert_msg = "The beginning of the truncated message should match the original message."
+    assert truncated_message[:half_length] == "A" * half_length, assert_msg
+
+    # Test the end of the message
+    assert_msg = "The end of the truncated message should be '...' to indicate truncation."
+    assert truncated_message[-half_length:] == "A" * half_length, assert_msg
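The three new tests spell out the truncation contract: messages of 4000 characters or fewer pass through unchanged, while longer ones keep half_length = (4000 - 3) // 2 characters from each end with "..." between them, for 3999 characters total. The following is a sketch consistent with that contract, not the actual implementation in discord_rss_bot/feeds.py:

def truncate_webhook_message(webhook_message: str) -> str:
    # Sketch only. Discord webhook content is capped at 4000 characters in these tests.
    max_content_length = 4000
    if len(webhook_message) <= max_content_length:
        return webhook_message
    half_length = (max_content_length - 3) // 2  # leave room for the "..." separator
    return f"{webhook_message[:half_length]}...{webhook_message[-half_length:]}"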
@@ -1,8 +1,9 @@
-from typing import TYPE_CHECKING, Literal
+import urllib.parse
+from typing import TYPE_CHECKING

 from fastapi.testclient import TestClient

-from discord_rss_bot.main import app, encode_url
+from discord_rss_bot.main import app

 if TYPE_CHECKING:
     from httpx import Response
@@ -11,7 +12,10 @@ client: TestClient = TestClient(app)
 webhook_name: str = "Hello, I am a webhook!"
 webhook_url: str = "https://discord.com/api/webhooks/1234567890/abcdefghijklmnopqrstuvwxyz"
 feed_url: str = "https://lovinator.space/rss_test.xml"
-encoded_feed_url: str = encode_url(feed_url)


+def encoded_feed_url(url: str) -> str:
+    return urllib.parse.quote(feed_url) if url else ""
+
+
 def test_search() -> None:
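For reference, urllib.parse.quote leaves "/" unescaped by default, so the helper above turns the module-level test URL into its percent-encoded form; this is only an illustrative snippet, not part of the commit:

import urllib.parse

# ":" is encoded, "/" is kept as-is.
print(urllib.parse.quote("https://lovinator.space/rss_test.xml"))
# https%3A//lovinator.space/rss_test.xml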
@@ -20,7 +24,7 @@ def test_search() -> None:
     feeds: Response = client.get("/")
     if feed_url in feeds.text:
         client.post(url="/remove", data={"feed_url": feed_url})
-        client.post(url="/remove", data={"feed_url": encoded_feed_url})
+        client.post(url="/remove", data={"feed_url": encoded_feed_url(feed_url)})

     # Delete the webhook if it already exists before we run the test.
     response: Response = client.post(url="/delete_webhook", data={"webhook_url": webhook_url})
@@ -30,27 +34,20 @@ def test_search() -> None:
         url="/add_webhook",
         data={"webhook_name": webhook_name, "webhook_url": webhook_url},
     )
-    assert response.status_code == 200
+    assert response.status_code == 200, f"Failed to add webhook: {response.text}"

     # Add the feed.
     response: Response = client.post(url="/add", data={"feed_url": feed_url, "webhook_dropdown": webhook_name})
-    assert response.status_code == 200
+    assert response.status_code == 200, f"Failed to add feed: {response.text}"

     # Check that the feed was added.
     response = client.get(url="/")
-    assert response.status_code == 200
-    assert feed_url in response.text
+    assert response.status_code == 200, f"Failed to get /: {response.text}"
+    assert feed_url in response.text, f"Feed not found in /: {response.text}"

     # Search for an entry.
     response: Response = client.get(url="/search/?query=a")
-    assert response.status_code == 200
-
-
-def test_encode_url() -> None:
-    """Test the encode_url function."""
-    before: Literal["https://www.google.com/"] = "https://www.google.com/"
-    after: Literal["https%3A//www.google.com/"] = "https%3A//www.google.com/"
-    assert encode_url(url_to_quote=before) == after
+    assert response.status_code == 200, f"Failed to search for entry: {response.text}"


 def test_add_webhook() -> None:
@@ -63,12 +60,12 @@ def test_add_webhook() -> None:
         url="/add_webhook",
         data={"webhook_name": webhook_name, "webhook_url": webhook_url},
     )
-    assert response.status_code == 200
+    assert response.status_code == 200, f"Failed to add webhook: {response.text}"

     # Check that the webhook was added.
     response = client.get(url="/webhooks")
-    assert response.status_code == 200
-    assert webhook_name in response.text
+    assert response.status_code == 200, f"Failed to get /webhooks: {response.text}"
+    assert webhook_name in response.text, f"Webhook not found in /webhooks: {response.text}"


 def test_create_feed() -> None:
@@ -77,16 +74,16 @@ def test_create_feed() -> None:
     feeds: Response = client.get(url="/")
     if feed_url in feeds.text:
         client.post(url="/remove", data={"feed_url": feed_url})
-        client.post(url="/remove", data={"feed_url": encoded_feed_url})
+        client.post(url="/remove", data={"feed_url": encoded_feed_url(feed_url)})

     # Add the feed.
     response: Response = client.post(url="/add", data={"feed_url": feed_url, "webhook_dropdown": webhook_name})
-    assert response.status_code == 200
+    assert response.status_code == 200, f"Failed to add feed: {response.text}"

     # Check that the feed was added.
     response = client.get(url="/")
-    assert response.status_code == 200
-    assert feed_url in response.text
+    assert response.status_code == 200, f"Failed to get /: {response.text}"
+    assert feed_url in response.text, f"Feed not found in /: {response.text}"


 def test_get() -> None:
@@ -95,46 +92,43 @@ def test_get() -> None:
     feeds: Response = client.get("/")
     if feed_url in feeds.text:
         client.post(url="/remove", data={"feed_url": feed_url})
-        client.post(url="/remove", data={"feed_url": encoded_feed_url})
+        client.post(url="/remove", data={"feed_url": encoded_feed_url(feed_url)})

     # Add the feed.
     response: Response = client.post(url="/add", data={"feed_url": feed_url, "webhook_dropdown": webhook_name})
-    assert response.status_code == 200
+    assert response.status_code == 200, f"Failed to add feed: {response.text}"

     # Check that the feed was added.
     response = client.get("/")
-    assert response.status_code == 200
-    assert feed_url in response.text
+    assert response.status_code == 200, f"Failed to get /: {response.text}"
+    assert feed_url in response.text, f"Feed not found in /: {response.text}"

     response: Response = client.get(url="/add")
-    assert response.status_code == 200
+    assert response.status_code == 200, f"/add failed: {response.text}"

     response: Response = client.get(url="/add_webhook")
-    assert response.status_code == 200
+    assert response.status_code == 200, f"/add_webhook failed: {response.text}"

-    response: Response = client.get(url="/blacklist", params={"feed_url": encoded_feed_url})
-    assert response.status_code == 200
+    response: Response = client.get(url="/blacklist", params={"feed_url": encoded_feed_url(feed_url)})
+    assert response.status_code == 200, f"/blacklist failed: {response.text}"

-    response: Response = client.get(url="/custom", params={"feed_url": encoded_feed_url})
-    assert response.status_code == 200
+    response: Response = client.get(url="/custom", params={"feed_url": encoded_feed_url(feed_url)})
+    assert response.status_code == 200, f"/custom failed: {response.text}"

-    response: Response = client.get(url="/embed", params={"feed_url": encoded_feed_url})
-    assert response.status_code == 200
+    response: Response = client.get(url="/embed", params={"feed_url": encoded_feed_url(feed_url)})
+    assert response.status_code == 200, f"/embed failed: {response.text}"

-    response: Response = client.get(url="/feed", params={"feed_url": encoded_feed_url})
-    assert response.status_code == 200
-
-    response: Response = client.get(url="/feed_more", params={"feed_url": encoded_feed_url})
-    assert response.status_code == 200
+    response: Response = client.get(url="/feed", params={"feed_url": encoded_feed_url(feed_url)})
+    assert response.status_code == 200, f"/feed failed: {response.text}"

     response: Response = client.get(url="/")
-    assert response.status_code == 200
+    assert response.status_code == 200, f"/ failed: {response.text}"

     response: Response = client.get(url="/webhooks")
-    assert response.status_code == 200
+    assert response.status_code == 200, f"/webhooks failed: {response.text}"

-    response: Response = client.get(url="/whitelist", params={"feed_url": encoded_feed_url})
-    assert response.status_code == 200
+    response: Response = client.get(url="/whitelist", params={"feed_url": encoded_feed_url(feed_url)})
+    assert response.status_code == 200, f"/whitelist failed: {response.text}"


 def test_pause_feed() -> None:
@@ -143,7 +137,7 @@ def test_pause_feed() -> None:
     feeds: Response = client.get(url="/")
     if feed_url in feeds.text:
         client.post(url="/remove", data={"feed_url": feed_url})
-        client.post(url="/remove", data={"feed_url": encoded_feed_url})
+        client.post(url="/remove", data={"feed_url": encoded_feed_url(feed_url)})

     # Add the feed.
     response: Response = client.post(url="/add", data={"feed_url": feed_url, "webhook_dropdown": webhook_name})
@@ -152,16 +146,16 @@ def test_pause_feed() -> None:
     feeds: Response = client.get(url="/")
     if "Paused" in feeds.text:
         response: Response = client.post(url="/unpause", data={"feed_url": feed_url})
-        assert response.status_code == 200
+        assert response.status_code == 200, f"Failed to unpause feed: {response.text}"

     # Pause the feed.
     response: Response = client.post(url="/pause", data={"feed_url": feed_url})
-    assert response.status_code == 200
+    assert response.status_code == 200, f"Failed to pause feed: {response.text}"

     # Check that the feed was paused.
     response = client.get(url="/")
-    assert response.status_code == 200
-    assert feed_url in response.text
+    assert response.status_code == 200, f"Failed to get /: {response.text}"
+    assert feed_url in response.text, f"Feed not found in /: {response.text}"


 def test_unpause_feed() -> None:
@@ -170,7 +164,7 @@ def test_unpause_feed() -> None:
     feeds: Response = client.get("/")
     if feed_url in feeds.text:
         client.post(url="/remove", data={"feed_url": feed_url})
-        client.post(url="/remove", data={"feed_url": encoded_feed_url})
+        client.post(url="/remove", data={"feed_url": encoded_feed_url(feed_url)})

     # Add the feed.
     response: Response = client.post(url="/add", data={"feed_url": feed_url, "webhook_dropdown": webhook_name})
@@ -179,16 +173,16 @@ def test_unpause_feed() -> None:
     feeds: Response = client.get(url="/")
     if "Paused" not in feeds.text:
         response: Response = client.post(url="/pause", data={"feed_url": feed_url})
-        assert response.status_code == 200
+        assert response.status_code == 200, f"Failed to pause feed: {response.text}"

     # Unpause the feed.
     response: Response = client.post(url="/unpause", data={"feed_url": feed_url})
-    assert response.status_code == 200
+    assert response.status_code == 200, f"Failed to unpause feed: {response.text}"

     # Check that the feed was unpaused.
     response = client.get(url="/")
-    assert response.status_code == 200
-    assert feed_url in response.text
+    assert response.status_code == 200, f"Failed to get /: {response.text}"
+    assert feed_url in response.text, f"Feed not found in /: {response.text}"


 def test_remove_feed() -> None:
@@ -197,19 +191,19 @@ def test_remove_feed() -> None:
     feeds: Response = client.get(url="/")
     if feed_url in feeds.text:
         client.post(url="/remove", data={"feed_url": feed_url})
-        client.post(url="/remove", data={"feed_url": encoded_feed_url})
+        client.post(url="/remove", data={"feed_url": encoded_feed_url(feed_url)})

     # Add the feed.
     response: Response = client.post(url="/add", data={"feed_url": feed_url, "webhook_dropdown": webhook_name})

     # Remove the feed.
     response: Response = client.post(url="/remove", data={"feed_url": feed_url})
-    assert response.status_code == 200
+    assert response.status_code == 200, f"Failed to remove feed: {response.text}"

     # Check that the feed was removed.
     response = client.get(url="/")
-    assert response.status_code == 200
-    assert feed_url not in response.text
+    assert response.status_code == 200, f"Failed to get /: {response.text}"
+    assert feed_url not in response.text, f"Feed found in /: {response.text}"


 def test_delete_webhook() -> None:
@@ -227,9 +221,9 @@ def test_delete_webhook() -> None:

     # Delete the webhook.
     response: Response = client.post(url="/delete_webhook", data={"webhook_url": webhook_url})
-    assert response.status_code == 200
+    assert response.status_code == 200, f"Failed to delete webhook: {response.text}"

     # Check that the webhook was added.
     response = client.get(url="/webhooks")
-    assert response.status_code == 200
-    assert webhook_name not in response.text
+    assert response.status_code == 200, f"Failed to get /webhooks: {response.text}"
+    assert webhook_name not in response.text, f"Webhook found in /webhooks: {response.text}"
@@ -16,26 +16,26 @@ def test_create_html_for_search_results() -> None:
     with tempfile.TemporaryDirectory() as temp_dir:
         # Create the temp directory.
         Path.mkdir(Path(temp_dir), exist_ok=True)
-        assert Path.exists(Path(temp_dir))
+        assert Path.exists(Path(temp_dir)), f"The directory '{temp_dir}' should exist."

         # Create a temporary reader.
         reader: Reader = make_reader(url=str(Path(temp_dir, "test_db.sqlite")))
-        assert reader is not None
+        assert reader is not None, "The reader should not be None."

         # Add a feed to the reader.
         reader.add_feed("https://lovinator.space/rss_test.xml", exist_ok=True)

         # Check that the feed was added.
         feeds: Iterable[Feed] = reader.get_feeds()
-        assert feeds is not None
-        assert len(list(feeds)) == 1
+        assert feeds is not None, f"The feeds should not be None. Got: {feeds}"
+        assert len(list(feeds)) == 1, f"The number of feeds should be 1. Got: {len(list(feeds))}"

         # Update the feed to get the entries.
         reader.update_feeds()

         # Get the feed.
         feed: Feed = reader.get_feed("https://lovinator.space/rss_test.xml")
-        assert feed is not None
+        assert feed is not None, f"The feed should not be None. Got: {feed}"

         # Update the search index.
         reader.enable_search()
@@ -43,8 +43,8 @@ def test_create_html_for_search_results() -> None:

         # Create the HTML and check if it is not empty.
         search_html: str = create_html_for_search_results("a", reader)
-        assert search_html is not None
-        assert len(search_html) > 10
+        assert search_html is not None, f"The search HTML should not be None. Got: {search_html}"
+        assert len(search_html) > 10, f"The search HTML should be longer than 10 characters. Got: {len(search_html)}"

         # Close the reader, so we can delete the directory.
         reader.close()
@@ -10,7 +10,7 @@ from discord_rss_bot.settings import data_dir, default_custom_message, get_reader
 def test_reader() -> None:
     """Test the reader."""
     reader: Reader = get_reader()
-    assert isinstance(reader, Reader)
+    assert isinstance(reader, Reader), f"The reader should be an instance of Reader. But it was '{type(reader)}'."

     # Test the reader with a custom location.
     with tempfile.TemporaryDirectory() as temp_dir:
@@ -19,7 +19,8 @@ def test_reader() -> None:

         custom_loc: pathlib.Path = pathlib.Path(temp_dir, "custom_loc_db.sqlite")
         custom_reader: Reader = get_reader(custom_location=str(custom_loc))
-        assert isinstance(custom_reader, Reader)
+        assert_msg = f"The custom reader should be an instance of Reader. But it was '{type(custom_reader)}'."
+        assert isinstance(custom_reader, Reader), assert_msg

         # Close the reader, so we can delete the directory.
         custom_reader.close()
@@ -27,12 +28,13 @@ def test_reader() -> None:

 def test_data_dir() -> None:
     """Test the data directory."""
-    assert Path.exists(Path(data_dir))
+    assert Path.exists(Path(data_dir)), f"The data directory '{data_dir}' should exist."


 def test_default_custom_message() -> None:
     """Test the default custom message."""
-    assert default_custom_message == "{{entry_title}}\n{{entry_link}}"
+    assert_msg = f"The default custom message should be '{{entry_title}}\n{{entry_link}}'. But it was '{default_custom_message}'." # noqa: E501
+    assert default_custom_message == "{{entry_title}}\n{{entry_link}}", assert_msg


 def test_get_webhook_for_entry() -> None:
@@ -52,7 +54,7 @@ def test_get_webhook_for_entry() -> None:
     # Add a webhook to the database.
     custom_reader.set_tag("https://www.reddit.com/r/movies.rss", "webhook", "https://example.com") # type: ignore
     our_tag: str = custom_reader.get_tag("https://www.reddit.com/r/movies.rss", "webhook") # type: ignore
-    assert our_tag == "https://example.com"
+    assert our_tag == "https://example.com", f"The tag should be 'https://example.com'. But it was '{our_tag}'."

     # Close the reader, so we can delete the directory.
     custom_reader.close()
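The test above only shows the "webhook" tag being set on a feed and read back; get_webhook_for_entry itself is not part of the diff. Purely as a hypothetical sketch of what such a lookup might do with the reader get_tag API:

from reader import Entry, Reader


def get_webhook_for_entry(custom_reader: Reader, entry: Entry) -> str:
    # Hypothetical sketch: read the "webhook" tag from the entry's feed, empty string if unset.
    return str(custom_reader.get_tag(entry.feed, "webhook", ""))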
@@ -2,11 +2,13 @@ from discord_rss_bot.filter.utils import is_word_in_text


 def test_is_word_in_text() -> None:
-    assert is_word_in_text("word1,word2", "This is a sample text containing word1 and word2.") is True
-    assert is_word_in_text("word1,word2", "This is a sample text containing word1.") is True
-    assert is_word_in_text("word1,word2", "This is a sample text containing word2.") is True
-    assert is_word_in_text("word1,word2", "This is a sample text containing WORD1 and WORD2.") is True
-    assert is_word_in_text("Alert,Forma", "Outbreak - Mutagen Mass - Rhea (Saturn)") is False
-    assert is_word_in_text("Alert,Forma", "Outbreak - Mutagen Mass - Rhea (Saturn)") is False
-    assert is_word_in_text("word1,word2", "This is a sample text containing none of the words.") is False
+    msg_true = "Should return True"
+    msg_false = "Should return False"
+
+    assert is_word_in_text("word1,word2", "This is a sample text containing word1 and word2.") is True, msg_true
+    assert is_word_in_text("word1,word2", "This is a sample text containing word1.") is True, msg_true
+    assert is_word_in_text("word1,word2", "This is a sample text containing word2.") is True, msg_true
+    assert is_word_in_text("word1,word2", "This is a sample text containing WORD1 and WORD2.") is True, msg_true
+    assert is_word_in_text("Alert,Forma", "Outbreak - Mutagen Mass - Rhea (Saturn)") is False, msg_false
+    assert is_word_in_text("Alert,Forma", "Outbreak - Mutagen Mass - Rhea (Saturn)") is False, msg_false
+    assert is_word_in_text("word1,word2", "This is a sample text containing none of the words.") is False, msg_false
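These assertions treat the first argument as a comma-separated word list and expect case-insensitive whole-word matching. A sketch with that behaviour, assuming a regex-based check (the real implementation in discord_rss_bot/filter/utils.py may differ in its details):

import re


def is_word_in_text(words: str, text: str) -> bool:
    # Sketch only: True if any comma-separated word appears as a whole word, ignoring case.
    return any(
        re.search(rf"\b{re.escape(word)}\b", text, re.IGNORECASE)
        for word in words.split(",")
    )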
@@ -33,7 +33,7 @@ def test_has_white_tags() -> None:
     reader.update_feeds()

     # Test feed without any whitelist tags
-    assert has_white_tags(custom_reader=get_reader(), feed=feed) is False
+    assert has_white_tags(custom_reader=get_reader(), feed=feed) is False, "Feed should not have any whitelist tags"

     check_if_has_tag(reader, feed, "whitelist_title")
     check_if_has_tag(reader, feed, "whitelist_summary")
@@ -45,9 +45,9 @@ def test_has_white_tags() -> None:

 def check_if_has_tag(reader: Reader, feed: Feed, whitelist_name: str) -> None:
     reader.set_tag(feed, whitelist_name, "a") # type: ignore
-    assert has_white_tags(custom_reader=reader, feed=feed) is True
+    assert has_white_tags(custom_reader=reader, feed=feed) is True, "Feed should have whitelist tags"
     reader.delete_tag(feed, whitelist_name)
-    assert has_white_tags(custom_reader=reader, feed=feed) is False
+    assert has_white_tags(custom_reader=reader, feed=feed) is False, "Feed should not have any whitelist tags"


 def test_should_be_sent() -> None:
@@ -61,51 +61,51 @@ def test_should_be_sent() -> None:
     # Get first entry
     first_entry: list[Entry] = []
     entries: Iterable[Entry] = reader.get_entries(feed=feed)
-    assert entries is not None
+    assert entries is not None, "Entries should not be None"
     for entry in entries:
         first_entry.append(entry)
         break
-    assert len(first_entry) == 1
+    assert len(first_entry) == 1, "First entry should be added"

     # Test entry without any whitelists
-    assert should_be_sent(reader, first_entry[0]) is False
+    assert should_be_sent(reader, first_entry[0]) is False, "Entry should not be sent"

     reader.set_tag(feed, "whitelist_title", "fvnnnfnfdnfdnfd") # type: ignore
-    assert should_be_sent(reader, first_entry[0]) is True
+    assert should_be_sent(reader, first_entry[0]) is True, "Entry should be sent"
     reader.delete_tag(feed, "whitelist_title")
-    assert should_be_sent(reader, first_entry[0]) is False
+    assert should_be_sent(reader, first_entry[0]) is False, "Entry should not be sent"

     reader.set_tag(feed, "whitelist_title", "åäö") # type: ignore
-    assert should_be_sent(reader, first_entry[0]) is False
+    assert should_be_sent(reader, first_entry[0]) is False, "Entry should not be sent"
     reader.delete_tag(feed, "whitelist_title")
-    assert should_be_sent(reader, first_entry[0]) is False
+    assert should_be_sent(reader, first_entry[0]) is False, "Entry should not be sent"

     reader.set_tag(feed, "whitelist_summary", "ffdnfdnfdnfdnfdndfn") # type: ignore
-    assert should_be_sent(reader, first_entry[0]) is True
+    assert should_be_sent(reader, first_entry[0]) is True, "Entry should be sent"
     reader.delete_tag(feed, "whitelist_summary")
-    assert should_be_sent(reader, first_entry[0]) is False
+    assert should_be_sent(reader, first_entry[0]) is False, "Entry should not be sent"

     reader.set_tag(feed, "whitelist_summary", "åäö") # type: ignore
-    assert should_be_sent(reader, first_entry[0]) is False
+    assert should_be_sent(reader, first_entry[0]) is False, "Entry should not be sent"
     reader.delete_tag(feed, "whitelist_summary")
-    assert should_be_sent(reader, first_entry[0]) is False
+    assert should_be_sent(reader, first_entry[0]) is False, "Entry should not be sent"

     reader.set_tag(feed, "whitelist_content", "ffdnfdnfdnfdnfdndfn") # type: ignore
-    assert should_be_sent(reader, first_entry[0]) is True
+    assert should_be_sent(reader, first_entry[0]) is True, "Entry should be sent"
     reader.delete_tag(feed, "whitelist_content")
-    assert should_be_sent(reader, first_entry[0]) is False
+    assert should_be_sent(reader, first_entry[0]) is False, "Entry should not be sent"

     reader.set_tag(feed, "whitelist_content", "åäö") # type: ignore
-    assert should_be_sent(reader, first_entry[0]) is False
+    assert should_be_sent(reader, first_entry[0]) is False, "Entry should not be sent"
     reader.delete_tag(feed, "whitelist_content")
-    assert should_be_sent(reader, first_entry[0]) is False
+    assert should_be_sent(reader, first_entry[0]) is False, "Entry should not be sent"

     reader.set_tag(feed, "whitelist_author", "TheLovinator") # type: ignore
-    assert should_be_sent(reader, first_entry[0]) is True
+    assert should_be_sent(reader, first_entry[0]) is True, "Entry should be sent"
     reader.delete_tag(feed, "whitelist_author")
-    assert should_be_sent(reader, first_entry[0]) is False
+    assert should_be_sent(reader, first_entry[0]) is False, "Entry should not be sent"

     reader.set_tag(feed, "whitelist_author", "åäö") # type: ignore
-    assert should_be_sent(reader, first_entry[0]) is False
+    assert should_be_sent(reader, first_entry[0]) is False, "Entry should not be sent"
     reader.delete_tag(feed, "whitelist_author")
-    assert should_be_sent(reader, first_entry[0]) is False
+    assert should_be_sent(reader, first_entry[0]) is False, "Entry should not be sent"