Add tests
discord_rss_bot/feeds.py
@@ -24,27 +24,31 @@ Exceptions:
 from typing import Iterable
 
 from discord_webhook import DiscordWebhook
-from reader import Entry
+from reader import Entry, Reader
 from requests import Response
 
-from discord_rss_bot.settings import reader
+from discord_rss_bot.settings import get_reader
 
 
-def send_to_discord(feed=None) -> None:
+def send_to_discord(reader: Reader = None, feed=None, do_once=False) -> None:
     """
     Send entries to Discord.
 
     If response was not ok, we will log the error and mark the entry as unread, so it will be sent again next time.
 
     Args:
+        reader: If we should use a custom reader instead of the default one.
         feed: The entry to send.
-    Raises:
-        NoWebhookFoundError: If no webhook is found.
+        do_once: If we should only send one entry. This is used in the test.
 
     Returns:
         Response: The response from the webhook.
     """
+    # Get the default reader if we didn't get a custom one.
+    if reader is None:
+        reader = get_reader()
 
+    # If we should get all entries, or just the entries from a specific feed.
     if feed is None:
         reader.update_feeds()
         entries: Iterable[Entry] = reader.get_entries(read=False)
@@ -53,13 +57,21 @@ def send_to_discord(feed=None) -> None:
         entries: Iterable[Entry] = reader.get_entries(feed=feed, read=False)
 
     for entry in entries:
+        # Set the webhook to read, so we don't send it again.
         reader.set_entry_read(entry, True)
+
+        # Get the webhook from the feed.
         webhook_url: str = str(reader.get_tag(entry.feed_url, "webhook"))
         webhook_message: str = f":robot: :mega: {entry.title}\n{entry.link}"
         webhook: DiscordWebhook = DiscordWebhook(url=webhook_url, content=webhook_message, rate_limit_retry=True)
 
+        # Send the webhook.
         response: Response = webhook.execute()
         if not response.ok:
             reader.set_entry_read(entry, False)
+
+        # If we only want to send one entry, we will break the loop. This is used when testing this function.
+        if do_once:
+            break
 
     reader.update_search()
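The new reader and do_once parameters are what make send_to_discord testable: a caller can inject a throwaway Reader and stop after the first entry instead of touching the default database and every unread entry. A minimal sketch of that usage, assuming a placeholder feed URL and webhook value (the real call lives in tests/test_feeds.py further down):

import tempfile
from pathlib import Path

from reader import make_reader

from discord_rss_bot.feeds import send_to_discord

# Build a throwaway reader backed by a temporary SQLite file.
with tempfile.TemporaryDirectory() as temp_dir:
    reader = make_reader(url=str(Path(temp_dir, "db.sqlite")))
    reader.add_feed("https://www.reddit.com/r/Python/.rss")  # placeholder feed
    reader.update_feeds()

    feed = reader.get_feed("https://www.reddit.com/r/Python/.rss")
    reader.set_tag(feed, "webhook", "https://discord.com/api/webhooks/...")  # placeholder webhook URL

    # Send at most one entry, using the injected reader instead of the default one.
    send_to_discord(reader=reader, feed=feed, do_once=True)
    reader.close()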
discord_rss_bot/main.py
@@ -49,11 +49,13 @@ from tomlkit.toml_document import TOMLDocument
 
 from discord_rss_bot.feeds import send_to_discord
 from discord_rss_bot.search import create_html_for_search_results
-from discord_rss_bot.settings import read_settings_file, reader
+from discord_rss_bot.settings import get_reader, read_settings_file
 
 app: FastAPI = FastAPI()
-app.mount("/static", StaticFiles(directory="static"), name="static")
-templates: Jinja2Templates = Jinja2Templates(directory="templates")
+app.mount("/static", StaticFiles(directory="discord_rss_bot/static"), name="static")
+templates: Jinja2Templates = Jinja2Templates(directory="discord_rss_bot/templates")
 
+reader = get_reader()
+
 
 def encode_url(url_to_quote: str) -> str:
discord_rss_bot/search.py
@@ -1,22 +1,28 @@
 import urllib.parse
 from typing import Iterable
 
-from reader import EntrySearchResult, Feed, HighlightedString
+from reader import EntrySearchResult, Feed, HighlightedString, Reader
 
-from discord_rss_bot.settings import reader
+from discord_rss_bot.settings import get_reader
 
 
-def create_html_for_search_results(search_results: Iterable[EntrySearchResult]) -> str:
+def create_html_for_search_results(search_results: Iterable[EntrySearchResult], reader: Reader = None) -> str:
     """Create HTML for the search results.
 
     Args:
         search_results: The search results.
+        reader: The reader. If None, we will get the reader from the settings.
 
     Returns:
         str: The HTML.
     """
     # TODO: There is a .content that also contains text, we should use that if .summary is not available.
     # TODO: We should also add <span> tags to the title.
+
+    # Get the default reader if we didn't get a custom one.
+    if reader is None:
+        reader = get_reader()
+
     html: str = ""
     for result in search_results:
         if ".summary" in result.content:
discord_rss_bot/settings.py
@@ -51,31 +51,52 @@ def create_settings_file(settings_file_location) -> None:
         f.write(doc.as_string())
 
 
-def get_db_location(custom_name: str = "db.sqlite") -> str:
+def get_db_location(custom_location: str = "") -> str:
     """Where we store the database file.
 
     Args:
-        custom_name: The name of the database file, defaults to db.sqlite.
+        custom_location: Where the database file should be stored. This should be with the file name.
 
     Returns:
         The database location.
     """
-    return os.path.join(data_dir, custom_name)
+    # Use the custom location if it is provided.
+    if custom_location:
+        return custom_location
+    else:
+        return os.path.join(data_dir, "db.sqlite")
 
 
-def read_settings_file(custom_name: str = "settings.toml") -> TOMLDocument:
+def read_settings_file(custom_location: str = "") -> TOMLDocument:
     """Read the settings file and return the settings as a dict.
 
     Args:
-        custom_name: The name of the settings file, defaults to settings.toml.
+        custom_location: The name of the settings file, defaults to settings.toml.
 
     Returns:
         dict: The settings file as a dict.
     """
-    settings_file: str = os.path.join(data_dir, custom_name)
-    with open(settings_file, encoding="utf-8") as f:
+    # Use the custom location if it is provided.
+    if custom_location:
+        settings_location = custom_location
+    else:
+        settings_location = os.path.join(data_dir, "settings.toml")
+
+    # Create the settings file if it doesn't exist.
+    if not os.path.exists(settings_location):
+        create_settings_file(settings_location)
+
+    # Read the settings file and return it as a dict.
+    with open(settings_location, encoding="utf-8") as f:
         return parse(f.read())
 
 
-db_location: str = get_db_location()
-reader: Reader = make_reader(db_location)
+def get_reader(custom_location: str = "") -> Reader:
+    """Get the reader.
+
+    Args:
+        custom_location: The location of the database file.
+
+    """
+    db_location: str = get_db_location(custom_location)
+    return make_reader(url=db_location)
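Because get_reader() replaces the module-level reader that was created at import time, callers now choose between the default database under data_dir and an explicit path. A small sketch of both paths, assuming a temporary directory for the custom case (this mirrors the tests added below and is not itself part of the commit):

import tempfile
from pathlib import Path

from discord_rss_bot.settings import get_db_location, get_reader

# Default: the database lives in the platformdirs user data directory, e.g. .../discord_rss_bot/db.sqlite.
print(get_db_location())

# Custom: point the reader at a throwaway SQLite file, which is what the tests do.
with tempfile.TemporaryDirectory() as temp_dir:
    custom_reader = get_reader(custom_location=str(Path(temp_dir, "db.sqlite")))
    # ... use custom_reader ...
    custom_reader.close()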
poetry.lock (generated, 66 changed lines)
@@ -171,6 +171,24 @@ category = "main"
 optional = false
 python-versions = ">=3.7"
 
+[[package]]
+name = "httpcore"
+version = "0.16.2"
+description = "A minimal low-level HTTP client."
+category = "dev"
+optional = false
+python-versions = ">=3.7"
+
+[package.dependencies]
+anyio = ">=3.0,<5.0"
+certifi = "*"
+h11 = ">=0.13,<0.15"
+sniffio = ">=1.0.0,<2.0.0"
+
+[package.extras]
+http2 = ["h2 (>=3,<5)"]
+socks = ["socksio (>=1.0.0,<2.0.0)"]
+
 [[package]]
 name = "httptools"
 version = "0.5.0"
@@ -182,6 +200,26 @@ python-versions = ">=3.5.0"
 [package.extras]
 test = ["Cython (>=0.29.24,<0.30.0)"]
 
+[[package]]
+name = "httpx"
+version = "0.23.1"
+description = "The next generation HTTP client."
+category = "dev"
+optional = false
+python-versions = ">=3.7"
+
+[package.dependencies]
+certifi = "*"
+httpcore = ">=0.15.0,<0.17.0"
+rfc3986 = {version = ">=1.3,<2", extras = ["idna2008"]}
+sniffio = "*"
+
+[package.extras]
+brotli = ["brotli", "brotlicffi"]
+cli = ["click (>=8.0.0,<9.0.0)", "pygments (>=2.0.0,<3.0.0)", "rich (>=10,<13)"]
+http2 = ["h2 (>=3,<5)"]
+socks = ["socksio (>=1.0.0,<2.0.0)"]
+
 [[package]]
 name = "idna"
 version = "3.4"
@@ -385,6 +423,20 @@ urllib3 = ">=1.21.1,<1.27"
 socks = ["PySocks (>=1.5.6,!=1.5.7)"]
 use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]
 
+[[package]]
+name = "rfc3986"
+version = "1.5.0"
+description = "Validating URI References per RFC 3986"
+category = "dev"
+optional = false
+python-versions = "*"
+
+[package.dependencies]
+idna = {version = "*", optional = true, markers = "extra == \"idna2008\""}
+
+[package.extras]
+idna2008 = ["idna"]
+
 [[package]]
 name = "setuptools"
 version = "65.6.3"
@@ -563,7 +615,7 @@ python-versions = ">=3.7"
 [metadata]
 lock-version = "1.1"
 python-versions = "^3.9"
-content-hash = "8d2193ad7226e4dc378c8e5165929840400a59aa6d7cbe4beb8484d176a413ec"
+content-hash = "573ff5547ffbdb8e61303cd6e161c323ceefdf9678e0846f7a87d25740d78847"
 
 [metadata.files]
 anyio = [
@@ -618,6 +670,10 @@ h11 = [
     {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"},
     {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"},
 ]
+httpcore = [
+    {file = "httpcore-0.16.2-py3-none-any.whl", hash = "sha256:52c79095197178856724541e845f2db86d5f1527640d9254b5b8f6f6cebfdee6"},
+    {file = "httpcore-0.16.2.tar.gz", hash = "sha256:c35c5176dc82db732acfd90b581a3062c999a72305df30c0fc8fafd8e4aca068"},
+]
 httptools = [
     {file = "httptools-0.5.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8f470c79061599a126d74385623ff4744c4e0f4a0997a353a44923c0b561ee51"},
     {file = "httptools-0.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e90491a4d77d0cb82e0e7a9cb35d86284c677402e4ce7ba6b448ccc7325c5421"},
@@ -661,6 +717,10 @@ httptools = [
     {file = "httptools-0.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:1af91b3650ce518d226466f30bbba5b6376dbd3ddb1b2be8b0658c6799dd450b"},
     {file = "httptools-0.5.0.tar.gz", hash = "sha256:295874861c173f9101960bba332429bb77ed4dcd8cdf5cee9922eb00e4f6bc09"},
 ]
+httpx = [
+    {file = "httpx-0.23.1-py3-none-any.whl", hash = "sha256:0b9b1f0ee18b9978d637b0776bfd7f54e2ca278e063e3586d8f01cda89e042a8"},
+    {file = "httpx-0.23.1.tar.gz", hash = "sha256:202ae15319be24efe9a8bd4ed4360e68fde7b38bcc2ce87088d416f026667d19"},
+]
 idna = [
     {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"},
     {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"},
@@ -838,6 +898,10 @@ requests = [
     {file = "requests-2.28.1-py3-none-any.whl", hash = "sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349"},
     {file = "requests-2.28.1.tar.gz", hash = "sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983"},
 ]
+rfc3986 = [
+    {file = "rfc3986-1.5.0-py2.py3-none-any.whl", hash = "sha256:a86d6e1f5b1dc238b218b012df0aa79409667bb209e58da56d0b94704e712a97"},
+    {file = "rfc3986-1.5.0.tar.gz", hash = "sha256:270aaf10d87d0d4e095063c65bf3ddbc6ee3d0b226328ce21e036f946e421835"},
+]
 setuptools = [
     {file = "setuptools-65.6.3-py3-none-any.whl", hash = "sha256:57f6f22bde4e042978bcd50176fdb381d7c21a9efa4041202288d3737a0c6a54"},
     {file = "setuptools-65.6.3.tar.gz", hash = "sha256:a7620757bf984b58deaf32fc8a4577a9bbc0850cf92c20e1ce41c38c19e5fb75"},
pyproject.toml
@@ -19,6 +19,7 @@ tomlkit = "^0.11.6"
 
 [tool.poetry.group.dev.dependencies]
 pytest = "^7.1.3"
+httpx = "^0.23.1"
 
 [build-system]
 requires = ["poetry-core>=1.0.0"]
tests/test_feeds.py (new file, 43 lines)
@@ -0,0 +1,43 @@
+import os
+import tempfile
+from pathlib import Path
+
+from reader import make_reader
+
+from discord_rss_bot.feeds import send_to_discord
+
+
+def test_send_to_discord() -> None:
+    """Test sending to Discord."""
+    with tempfile.TemporaryDirectory() as temp_dir:
+        # Create the temp directory.
+        os.makedirs(temp_dir, exist_ok=True)
+        assert os.path.exists(temp_dir)
+
+        # Create a temporary reader.
+        reader = make_reader(url=str(Path(temp_dir, "test_db.sqlite")))
+        assert reader is not None
+
+        # Add a feed to the reader.
+        reader.add_feed("https://www.reddit.com/r/Python/.rss")
+
+        # Update the feed to get the entries.
+        reader.update_feeds()
+
+        # Get the feed.
+        feed = reader.get_feed("https://www.reddit.com/r/Python/.rss")
+        assert feed is not None
+
+        # Get the webhook.
+        webhook_url = os.environ.get("TEST_WEBHOOK_URL")
+        assert webhook_url is not None
+
+        # Add tag to the feed and check if it's there.
+        reader.set_tag(feed, "webhook", webhook_url)
+        assert reader.get_tag(feed, "webhook") == webhook_url
+
+        # Send the feed to Discord.
+        send_to_discord(reader=reader, feed=feed, do_once=True)
+
+        # Close the reader, so we can delete the directory.
+        reader.close()
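Note that test_send_to_discord asserts that the TEST_WEBHOOK_URL environment variable is set, so the suite needs a real Discord webhook to pass as-is. One option, not part of this commit, is a conftest.py that skips that test when the variable is missing; a hypothetical sketch using pytest's documented collection hook:

# Hypothetical conftest.py sketch: skip the Discord test when no webhook is configured.
import os

import pytest


def pytest_collection_modifyitems(config, items):
    if os.environ.get("TEST_WEBHOOK_URL"):
        return
    skip = pytest.mark.skip(reason="TEST_WEBHOOK_URL is not set")
    for item in items:
        if "test_send_to_discord" in item.name:
            item.add_marker(skip)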
tests/test_main.py (new file, 26 lines)
@@ -0,0 +1,26 @@
+from fastapi.testclient import TestClient
+
+from discord_rss_bot.main import app, create_list_of_webhooks, encode_url
+
+client = TestClient(app)
+
+
+def test_read_main():
+    response = client.get("/")
+    assert response.status_code == 200
+
+
+def test_add():
+    response = client.get("/add")
+    assert response.status_code == 200
+
+
+def test_search():
+    response = client.get("/search/?query=a")
+    assert response.status_code == 200
+
+
+def test_encode_url():
+    before = "https://www.google.com/"
+    after = "https%3A//www.google.com/"
+    assert encode_url(url_to_quote=before) == after
tests/test_search.py (new file, 45 lines)
@@ -0,0 +1,45 @@
+import os
+import tempfile
+from pathlib import Path
+
+from reader import make_reader
+
+from discord_rss_bot.search import add_span_with_slice, create_html_for_search_results
+
+
+def test_create_html_for_search_results() -> None:
+    """Test create_html_for_search_results."""
+    # Create a reader.
+    with tempfile.TemporaryDirectory() as temp_dir:
+        # Create the temp directory.
+        os.makedirs(temp_dir, exist_ok=True)
+        assert os.path.exists(temp_dir)
+
+        # Create a temporary reader.
+        reader = make_reader(url=str(Path(temp_dir, "test_db.sqlite")))
+        assert reader is not None
+
+        # Add a feed to the reader.
+        reader.add_feed("https://www.reddit.com/r/Python/.rss")
+
+        # Update the feed to get the entries.
+        reader.update_feeds()
+
+        # Get the feed.
+        feed = reader.get_feed("https://www.reddit.com/r/Python/.rss")
+        assert feed is not None
+
+        # Update the search index.
+        reader.enable_search()
+        reader.update_search()
+
+        # Get the HTML for the search results.
+        search_results = reader.search_entries("a", feed=feed)
+
+        # Create the HTML and check if it is not empty.
+        search_html: str = create_html_for_search_results(search_results, reader)
+        assert search_html is not None
+        assert len(search_html) > 10
+
+        # Close the reader, so we can delete the directory.
+        reader.close()
tests/test_settings.py (new file, 83 lines)
@@ -0,0 +1,83 @@
+import os
+import pathlib
+import tempfile
+
+from platformdirs import user_data_dir
+from reader import Reader
+from tomlkit import TOMLDocument
+
+from discord_rss_bot.settings import create_settings_file, data_dir, get_db_location, get_reader, read_settings_file
+
+
+def test_read_settings_file():
+    """Test reading the settings file."""
+    with tempfile.TemporaryDirectory() as temp_dir:
+        custom_loc = os.path.join(temp_dir, "test_settings.toml")
+
+        # File should not exist yet, so this should fail.
+        assert not os.path.exists(custom_loc)
+
+        # Create the file.
+        settings: TOMLDocument = read_settings_file(custom_location=custom_loc)
+
+        # Check if the settings file is a toml document.
+        assert isinstance(settings, TOMLDocument)
+
+        # Check if file exists
+        assert os.path.exists(os.path.join(temp_dir, "test_settings.toml"))
+
+        # Check if the file has the correct contents
+        assert settings["webhooks"] == {}
+        assert settings["database"] == {}
+
+
+def test_get_db_location():
+    """Test getting the database location."""
+    with tempfile.TemporaryDirectory() as temp_dir:
+        custom_loc = os.path.join(temp_dir, "test_db.sqlite")
+
+        # File should not exist yet.
+        assert not os.path.exists(custom_loc)
+
+        # Create the file and check if it exists.
+        assert get_db_location(custom_location=custom_loc) == os.path.join(temp_dir, "test_db.sqlite")
+
+        # Test with the default location
+        loc = user_data_dir(appname="discord_rss_bot", appauthor="TheLovinator", roaming=True)
+        assert get_db_location() == os.path.join(loc, "db.sqlite")
+
+
+def test_reader():
+    """Test the reader."""
+    reader = get_reader()
+    assert isinstance(reader, Reader)
+
+    # Test the reader with a custom location.
+    with tempfile.TemporaryDirectory() as temp_dir:
+        # Create the temp directory
+        os.makedirs(temp_dir, exist_ok=True)
+
+        custom_loc = pathlib.Path(temp_dir, "custom_loc_db.sqlite")
+        custom_reader = get_reader(custom_location=str(custom_loc))
+        assert isinstance(custom_reader, Reader)
+
+        # Close the reader, so we can delete the directory.
+        custom_reader.close()
+
+
+def test_create_settings_file():
+    """Test creating the settings file."""
+    with tempfile.TemporaryDirectory() as temp_dir:
+        settings_file_location: str = os.path.join(temp_dir, "settings.toml")
+
+        # File should not exist yet.
+        assert not os.path.exists(settings_file_location)
+
+        # Create the file and check if it exists.
+        create_settings_file(settings_file_location)
+        assert os.path.exists(settings_file_location)
+
+
+def test_data_dir():
+    """Test the data directory."""
+    assert os.path.exists(data_dir)