Add more tests
This commit is contained in:
141
tests/test_blacklist.py
Normal file
141
tests/test_blacklist.py
Normal file
@ -0,0 +1,141 @@
|
||||
import tempfile
|
||||
from pathlib import Path
|
||||
from typing import Iterable
|
||||
|
||||
from reader import Entry, Feed, Reader, make_reader
|
||||
|
||||
from discord_rss_bot.filter.blacklist import (
|
||||
get_blacklist_content,
|
||||
get_blacklist_summary,
|
||||
get_blacklist_title,
|
||||
has_black_tags,
|
||||
should_be_skipped,
|
||||
)
|
||||
|
||||
feed_url = "https://lovinator.space/rss_test.xml"
|
||||
|
||||
|
||||
# Create the database
|
||||
def get_reader() -> Reader:
    """Create a Reader backed by a SQLite database in a fresh temp directory.

    Each call builds an isolated database so tests do not share state.
    NOTE(review): the temp directory is never removed — fine for short-lived
    test runs, but worth a cleanup fixture eventually.
    """
    temp_dir = Path(tempfile.mkdtemp())
    db_path: Path = temp_dir / "test.sqlite"
    return make_reader(url=str(db_path))
|
||||
|
||||
|
||||
def test_has_black_tags() -> None:
    """has_black_tags() should be True exactly while a blacklist_* tag is set."""
    reader: Reader = get_reader()

    # Add feed and update entries.
    reader.add_feed(feed_url)
    feed: Feed = reader.get_feed(feed_url)
    reader.update_feeds()

    # A feed without any blacklist tags should not be flagged.
    # Bug fix: check against the same reader that owns the feed. The old code
    # called get_reader() again, which builds a brand-new empty database, so
    # the assertion passed for the wrong reason.
    assert has_black_tags(custom_reader=reader, feed=feed) is False

    check_if_has_tag(reader, feed, "blacklist_title")
    check_if_has_tag(reader, feed, "blacklist_summary")
    check_if_has_tag(reader, feed, "blacklist_content")

    # Clean up.
    reader.delete_feed(feed_url)
|
||||
|
||||
|
||||
def check_if_has_tag(reader, feed, blacklist_name):
|
||||
reader.set_tag(feed, blacklist_name, "a")
|
||||
assert has_black_tags(custom_reader=reader, feed=feed) is True
|
||||
reader.delete_tag(feed, blacklist_name)
|
||||
assert has_black_tags(custom_reader=reader, feed=feed) is False
|
||||
|
||||
|
||||
def test_should_be_skipped() -> None:
    """should_be_skipped() should react to each blacklist_* tag on the feed."""
    reader: Reader = get_reader()

    # Add feed and update entries.
    reader.add_feed(feed_url)
    feed: Feed = reader.get_feed(feed_url)
    reader.update_feeds()

    # Grab the first entry. Idiom fix: next() on the iterator replaces the
    # old append-then-break loop into a single-element list.
    entries: Iterable[Entry] = reader.get_entries(feed=feed)
    first_entry: Entry | None = next(iter(entries), None)
    assert first_entry is not None

    # An entry with no blacklists set should never be skipped.
    assert should_be_skipped(reader, first_entry) is False

    # Title blacklist toggles skipping on and off.
    reader.set_tag(feed, "blacklist_title", "fvnnnfnfdnfdnfd")  # type: ignore
    assert should_be_skipped(reader, first_entry) is True
    reader.delete_tag(feed, "blacklist_title")
    assert should_be_skipped(reader, first_entry) is False

    # Summary blacklist toggles skipping on and off.
    reader.set_tag(feed, "blacklist_summary", "ffdnfdnfdnfdnfdndfn")  # type: ignore
    assert should_be_skipped(reader, first_entry) is True
    reader.delete_tag(feed, "blacklist_summary")
    assert should_be_skipped(reader, first_entry) is False

    # TODO: Content blacklisting is not implemented yet, so the entry is
    # expected NOT to be skipped even with the tag set.
    reader.set_tag(feed, "blacklist_content", "ffdnfdnfdnfdnfdndfn")  # type: ignore
    assert should_be_skipped(reader, first_entry) is False
    reader.delete_tag(feed, "blacklist_content")
    assert should_be_skipped(reader, first_entry) is False

    # TODO: Also add support for entry_text.
|
||||
|
||||
|
||||
def test_get_blacklist_content() -> None:
    """get_blacklist_content() returns the tag value, or "" when unset."""
    reader: Reader = get_reader()

    # Add the feed and fetch its entries.
    reader.add_feed(feed_url)
    feed: Feed = reader.get_feed(feed_url)
    reader.update_feeds()

    # No tag set yet -> empty string.
    assert get_blacklist_content(reader, feed) == ""  # type: ignore

    # Setting the tag makes its value visible.
    reader.set_tag(feed, "blacklist_content", "ffdnfdnfdnfdnfdndfn")  # type: ignore
    assert get_blacklist_content(reader, feed) == "ffdnfdnfdnfdnfdndfn"  # type: ignore

    # Removing the tag restores the empty default.
    reader.delete_tag(feed, "blacklist_content")
    assert get_blacklist_content(reader, feed) == ""  # type: ignore
|
||||
|
||||
|
||||
def test_get_blacklist_summary() -> None:
    """get_blacklist_summary() returns the tag value, or "" when unset."""
    reader: Reader = get_reader()

    # Add the feed and fetch its entries.
    reader.add_feed(feed_url)
    feed: Feed = reader.get_feed(feed_url)
    reader.update_feeds()

    # No tag set yet -> empty string.
    assert get_blacklist_summary(reader, feed) == ""  # type: ignore

    # Setting the tag makes its value visible.
    reader.set_tag(feed, "blacklist_summary", "ffdnfdnfdnfdnfdndfn")  # type: ignore
    assert get_blacklist_summary(reader, feed) == "ffdnfdnfdnfdnfdndfn"  # type: ignore

    # Removing the tag restores the empty default.
    reader.delete_tag(feed, "blacklist_summary")
    assert get_blacklist_summary(reader, feed) == ""  # type: ignore
|
||||
|
||||
|
||||
def test_get_blacklist_title() -> None:
    """get_blacklist_title() returns the tag value, or "" when unset."""
    reader: Reader = get_reader()

    # Add the feed and fetch its entries.
    reader.add_feed(feed_url)
    feed: Feed = reader.get_feed(feed_url)
    reader.update_feeds()

    # No tag set yet -> empty string.
    assert get_blacklist_title(reader, feed) == ""  # type: ignore

    # Setting the tag makes its value visible.
    reader.set_tag(feed, "blacklist_title", "ffdnfdnfdnfdnfdndfn")  # type: ignore
    assert get_blacklist_title(reader, feed) == "ffdnfdnfdnfdnfdndfn"  # type: ignore

    # Removing the tag restores the empty default.
    reader.delete_tag(feed, "blacklist_title")
    assert get_blacklist_title(reader, feed) == ""  # type: ignore
|
@ -6,6 +6,10 @@ from httpx import Response
|
||||
from discord_rss_bot.main import app, encode_url
|
||||
|
||||
client: TestClient = TestClient(app)

# Shared fixtures used by the endpoint tests below.
webhook_name: str = "Hello, I am a webhook!"
webhook_url: str = "https://discord.com/api/webhooks/1234567890/abcdefghijklmnopqrstuvwxyz"
feed_url: str = "https://lovinator.space/rss_test.xml"

# Bug fix: this previously encoded webhook_url, so /remove calls that used the
# "encoded feed URL" never matched the feed they meant to remove.
encoded_feed_url: str = encode_url(feed_url)
|
||||
|
||||
|
||||
def test_read_main() -> None:
|
||||
@ -14,14 +18,31 @@ def test_read_main() -> None:
|
||||
assert response.status_code == 200
|
||||
|
||||
|
||||
def test_add() -> None:
    """The /add page should render successfully."""
    add_page: Response = client.get("/add")
    assert add_page.status_code == 200
|
||||
|
||||
|
||||
def test_search() -> None:
    """Test the /search page."""
    # Make sure the feed is gone before the test starts.
    home: Response = client.get("/")
    if feed_url in home.text:
        client.post("/remove", data={"feed_url": feed_url})
        client.post("/remove", data={"feed_url": encoded_feed_url})

    # Start from a clean slate: drop any pre-existing webhook.
    client.post("/delete_webhook", data={"webhook_url": webhook_url})

    # Register the webhook.
    add_webhook: Response = client.post(
        "/add_webhook",
        data={"webhook_name": webhook_name, "webhook_url": webhook_url},
    )
    assert add_webhook.status_code == 200

    # Add the feed.
    add_feed: Response = client.post(
        "/add",
        data={"feed_url": feed_url, "webhook_dropdown": webhook_name},
    )
    assert add_feed.status_code == 200

    # The feed should now show up on the front page.
    home = client.get("/")
    assert home.status_code == 200
    assert feed_url in home.text

    # Search for an entry.
    search: Response = client.get("/search/?query=a")
    assert search.status_code == 200
|
||||
|
||||
@ -31,3 +52,131 @@ def test_encode_url() -> None:
|
||||
before: Literal["https://www.google.com/"] = "https://www.google.com/"
|
||||
after: Literal["https%3A//www.google.com/"] = "https%3A//www.google.com/"
|
||||
assert encode_url(url_to_quote=before) == after
|
||||
|
||||
|
||||
def test_add_webhook() -> None:
    """Test the /add_webhook page."""
    # Drop the webhook if a previous run left it behind.
    client.post("/delete_webhook", data={"webhook_url": webhook_url})

    # Add the webhook.
    add_response: Response = client.post(
        "/add_webhook",
        data={"webhook_name": webhook_name, "webhook_url": webhook_url},
    )
    assert add_response.status_code == 200

    # The webhook list should now contain the new webhook.
    webhooks: Response = client.get("/webhooks")
    assert webhooks.status_code == 200
    assert webhook_name in webhooks.text
|
||||
|
||||
|
||||
def test_create_feed() -> None:
    """Test the /create_feed page."""
    # Remove any leftover copy of the feed first.
    home: Response = client.get("/")
    if feed_url in home.text:
        client.post("/remove", data={"feed_url": feed_url})
        client.post("/remove", data={"feed_url": encoded_feed_url})

    # Add the feed.
    add_response: Response = client.post(
        "/add",
        data={"feed_url": feed_url, "webhook_dropdown": webhook_name},
    )
    assert add_response.status_code == 200

    # The feed should now be listed on the front page.
    home = client.get("/")
    assert home.status_code == 200
    assert feed_url in home.text
|
||||
|
||||
|
||||
def test_pause_feed() -> None:
    """Test the /pause_feed page."""
    # Remove any leftover copy of the feed first.
    home: Response = client.get("/")
    if feed_url in home.text:
        client.post("/remove", data={"feed_url": feed_url})
        client.post("/remove", data={"feed_url": encoded_feed_url})

    # Add the feed.
    client.post("/add", data={"feed_url": feed_url, "webhook_dropdown": webhook_name})

    # Unpause the feed if a previous run left it paused.
    home = client.get("/")
    if "Paused" in home.text:
        unpause_response: Response = client.post("/unpause", data={"feed_url": feed_url})
        assert unpause_response.status_code == 200

    # Pause the feed.
    pause_response: Response = client.post("/pause", data={"feed_url": feed_url})
    assert pause_response.status_code == 200

    # The feed should still be listed on the front page.
    home = client.get("/")
    assert home.status_code == 200
    assert feed_url in home.text
|
||||
|
||||
|
||||
def test_unpause_feed() -> None:
    """Test the /unpause_feed page."""
    # Remove any leftover copy of the feed first.
    home: Response = client.get("/")
    if feed_url in home.text:
        client.post("/remove", data={"feed_url": feed_url})
        client.post("/remove", data={"feed_url": encoded_feed_url})

    # Add the feed.
    client.post("/add", data={"feed_url": feed_url, "webhook_dropdown": webhook_name})

    # Pause the feed if it is not already paused.
    home = client.get("/")
    if "Paused" not in home.text:
        pause_response: Response = client.post("/pause", data={"feed_url": feed_url})
        assert pause_response.status_code == 200

    # Unpause the feed.
    unpause_response: Response = client.post("/unpause", data={"feed_url": feed_url})
    assert unpause_response.status_code == 200

    # The feed should still be listed on the front page.
    home = client.get("/")
    assert home.status_code == 200
    assert feed_url in home.text
|
||||
|
||||
|
||||
def test_delete_webhook() -> None:
    """Test the /delete_webhook page."""
    # Drop the webhook if a previous run left it behind.
    webhooks: Response = client.get("/webhooks")
    if webhook_url in webhooks.text:
        client.post("/delete_webhook", data={"webhook_url": webhook_url})

    # Add the webhook so there is something to delete.
    client.post("/add_webhook", data={"webhook_name": webhook_name, "webhook_url": webhook_url})

    # Delete the webhook.
    delete_response: Response = client.post("/delete_webhook", data={"webhook_url": webhook_url})
    assert delete_response.status_code == 200

    # The webhook should be gone from the list.
    webhooks = client.get("/webhooks")
    assert webhooks.status_code == 200
    assert webhook_name not in webhooks.text
|
||||
|
||||
|
||||
def test_remove_feed() -> None:
    """Test the /remove page."""
    # Remove any leftover copy of the feed first.
    home: Response = client.get("/")
    if feed_url in home.text:
        client.post("/remove", data={"feed_url": feed_url})
        client.post("/remove", data={"feed_url": encoded_feed_url})

    # Add the feed so there is something to remove.
    client.post("/add", data={"feed_url": feed_url, "webhook_dropdown": webhook_name})

    # Remove the feed.
    remove_response: Response = client.post("/remove", data={"feed_url": feed_url})
    assert remove_response.status_code == 200

    # The feed should no longer be listed.
    home = client.get("/")
    assert home.status_code == 200
    assert feed_url not in home.text
|
||||
|
@ -3,7 +3,7 @@ import tempfile
|
||||
from pathlib import Path
|
||||
from typing import Iterable
|
||||
|
||||
from reader import EntrySearchResult, Feed, Reader, make_reader # type: ignore
|
||||
from reader import EntrySearchResult, Feed, Reader, make_reader
|
||||
|
||||
from discord_rss_bot.search import create_html_for_search_results
|
||||
|
||||
@ -21,13 +21,18 @@ def test_create_html_for_search_results() -> None:
|
||||
assert reader is not None
|
||||
|
||||
# Add a feed to the reader.
|
||||
reader.add_feed("https://www.reddit.com/r/Python/.rss")
|
||||
reader.add_feed("https://lovinator.space/rss_test.xml", exist_ok=True)
|
||||
|
||||
# Check that the feed was added.
|
||||
feeds: Iterable[Feed] = reader.get_feeds()
|
||||
assert feeds is not None
|
||||
assert len(list(feeds)) == 1
|
||||
|
||||
# Update the feed to get the entries.
|
||||
reader.update_feeds()
|
||||
|
||||
# Get the feed.
|
||||
feed: Feed = reader.get_feed("https://www.reddit.com/r/Python/.rss")
|
||||
feed: Feed = reader.get_feed("https://lovinator.space/rss_test.xml")
|
||||
assert feed is not None
|
||||
|
||||
# Update the search index.
|
||||
|
Reference in New Issue
Block a user