Fix all the bugs
49 .github/workflows/build.yml vendored
@@ -1,44 +1,47 @@
---
name: Test code
on:
schedule:
- cron: "27 6 * * *"
push:
branches: [master]
pull_request:
branches: [master]
workflow_dispatch:

env:
TEST_WEBHOOK_URL: ${{ secrets.TEST_WEBHOOK_URL }}
jobs:
test:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- run: pipx install poetry
- uses: actions/setup-python@v4
- uses: actions/checkout@v4
- uses: actions/setup-python@v5
with:
python-version: "3.11"
cache: "poetry"
python-version: 3.12
- run: pipx install poetry
- run: poetry install
- run: poetry run pytest
env:
TEST_WEBHOOK_URL: ${{ secrets.TEST_WEBHOOK_URL }}
- name: Login to GitHub Container Registry

build:
runs-on: ubuntu-latest
permissions:
contents: read
packages: write
if: github.event_name != 'pull_request'
uses: docker/login-action@v2
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
needs: test
steps:
- uses: actions/checkout@v4
- uses: docker/setup-buildx-action@v3
- uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Extract Docker metadata
id: meta
uses: docker/metadata-action@v4
with:
images: ghcr.io/thelovinator1/discord-rss-bot
flavor: latest=${{ github.ref == 'refs/heads/master' }}
- name: Build and push Docker image
id: build-and-push
uses: docker/build-push-action@v4
- uses: docker/build-push-action@v5
with:
context: .
push: ${{ github.event_name != 'pull_request' }}
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
tags: |
ghcr.io/thelovinator1/discord-free-game-notifier:latest
ghcr.io/thelovinator1/discord-free-game-notifier:master
53 .pre-commit-config.yaml Normal file
@@ -0,0 +1,53 @@
default_language_version:
python: python3.12
repos:
# Automatically add trailing commas to calls and literals.
- repo: https://github.com/asottile/add-trailing-comma
rev: v3.1.0
hooks:
- id: add-trailing-comma

# Some out-of-the-box hooks for pre-commit.
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.6.0
hooks:
- id: check-added-large-files
- id: check-ast
- id: check-builtin-literals
- id: check-case-conflict
- id: check-docstring-first
- id: check-executables-have-shebangs
- id: check-merge-conflict
- id: check-shebang-scripts-are-executable
- id: check-symlinks
- id: check-toml
- id: check-vcs-permalinks
- id: check-xml
- id: check-yaml
- id: debug-statements
- id: end-of-file-fixer
- id: mixed-line-ending
- id: name-tests-test
args: [--django]
- id: trailing-whitespace

# Run Pyupgrade on all Python files. This will upgrade the code to Python 3.12.
- repo: https://github.com/asottile/pyupgrade
rev: v3.15.2
hooks:
- id: pyupgrade
args: ["--py312-plus"]

# An extremely fast Python linter and formatter.
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.4.4
hooks:
- id: ruff-format
- id: ruff
args: ["--fix", "--exit-non-zero-on-fix"]

# Static checker for GitHub Actions workflow files.
- repo: https://github.com/rhysd/actionlint
rev: v1.7.0
hooks:
- id: actionlint
2 .vscode/launch.json vendored
@@ -6,7 +6,7 @@
"configurations": [
{
"name": "Python: FastAPI",
"type": "python",
"type": "debugpy",
"request": "launch",
"module": "uvicorn",
"args": ["discord_rss_bot.main:app", "--reload"],
8 .vscode/settings.json vendored Normal file
@@ -0,0 +1,8 @@
{
"cSpell.words": [
"botuser",
"Genshins",
"levelname",
"pipx"
]
}
78 Dockerfile
@@ -1,38 +1,64 @@
FROM python:3.12-slim
# Stage 1: Build the requirements.txt using Poetry.
FROM python:3.12 AS builder

# Force the stdout and stderr streams to be unbuffered.
# Will allow log messages to be immediately dumped instead of being buffered.
# This is useful when the bot crashes before writing messages stuck in the buffer.
# Set environment variables for Python.
ENV PYTHONDONTWRITEBYTECODE 1
ENV PYTHONUNBUFFERED 1
ENV PATH="${PATH}:/root/.local/bin"

# Install system dependencies.
RUN apt-get update && \
apt-get install -y --no-install-recommends \
curl \
&& rm -rf /var/lib/apt/lists/*

# Install Poetry.
RUN curl -sSL https://install.python-poetry.org | python3 -

# Copy only the poetry.lock/pyproject.toml to leverage Docker cache.
WORKDIR /app
COPY pyproject.toml poetry.lock /app/

# Install dependencies and create requirements.txt.
RUN poetry self add poetry-plugin-export && poetry export --format=requirements.txt --output=requirements.txt --only=main --without-hashes

# Stage 2: Install dependencies and run the application
FROM python:3.12 AS runner

# Set environment variables for Python.
ENV PYTHONDONTWRITEBYTECODE 1
ENV PYTHONUNBUFFERED 1

# Don't generate byte code (.pyc-files).
# These are only needed if we run the python-files several times.
# Docker doesn't keep the data between runs so this adds nothing.
ENV PYTHONDONTWRITEBYTECODE 1

# Install Poetry
RUN pip install poetry --no-cache-dir --disable-pip-version-check --no-color

# Creata the botuser and create the directory where the code will be stored.
RUN useradd --create-home botuser && \
# Create a non-root user.
RUN useradd -ms /bin/bash botuser && \
install --verbose --directory --mode=0775 --owner=botuser --group=botuser /home/botuser/discord-rss-bot/ && \
install --verbose --directory --mode=0775 --owner=botuser --group=botuser /home/botuser/.local/share/discord_rss_bot/

# Copy the generated requirements.txt from the builder stage.
WORKDIR /home/botuser/discord-rss-bot
COPY --from=builder /app/requirements.txt /home/botuser/discord-rss-bot/

# Create a virtual environment and install dependencies.
RUN python -m venv /home/botuser/.venv && \
. /home/botuser/.venv/bin/activate && \
pip install --no-cache-dir --upgrade pip && \
pip install --no-cache-dir --upgrade setuptools wheel && \
pip install --no-cache-dir --requirement requirements.txt

# Copy the rest of the application code.
COPY . /home/botuser/discord-rss-bot/

# Change to the bot user so we don't run as root.
USER botuser

# Copy files from our repository to the container.
ADD --chown=botuser:botuser pyproject.toml poetry.lock README.md LICENSE /home/botuser/discord-rss-bot/

# This is the directory where the code will be stored.
WORKDIR /home/botuser/discord-rss-bot

# Install the dependencies.
RUN poetry install --no-interaction --no-ansi --only main

ADD --chown=botuser:botuser discord_rss_bot /home/botuser/discord-rss-bot/discord_rss_bot/

# The uvicorn server will listen on this port.
EXPOSE 5000

# Where our database file will be stored.
VOLUME /home/botuser/.local/share/discord_rss_bot/

CMD ["poetry", "run", "uvicorn", "discord_rss_bot.main:app", "--host", "0.0.0.0", "--port", "5000", "--proxy-headers", "--forwarded-allow-ips='*'"]
# Print the folder structure and wait so we can inspect the container.
# CMD ["tail", "-f", "/dev/null"]

# Run the application.
CMD ["/home/botuser/.venv/bin/python", "-m", "uvicorn", "discord_rss_bot.main:app", "--host=0.0.0.0", "--port=5000", "--proxy-headers", "--forwarded-allow-ips='*'", "--log-level", "debug"]
@@ -2,9 +2,8 @@

Subscribe to RSS feeds and get updates to a Discord webhook.

## This bot is not ready for production use.

You should use [MonitoRSS](https://github.com/synzen/monitorss) instead.
> [!NOTE]
> You should look at [MonitoRSS](https://github.com/synzen/monitorss) for a more feature-rich project.

## Installation
@@ -1,25 +1,32 @@
from __future__ import annotations

import json
from dataclasses import dataclass
from typing import TYPE_CHECKING

from bs4 import BeautifulSoup
from reader import Entry, Feed, Reader, TagNotFoundError

from discord_rss_bot.is_url_valid import is_url_valid
from discord_rss_bot.markdown import convert_html_to_md
from discord_rss_bot.settings import get_reader
from discord_rss_bot.settings import get_reader, logger

if TYPE_CHECKING:
from reader.types import JSONType


@dataclass()
@dataclass(slots=True)
class CustomEmbed:
title: str
description: str
color: str
author_name: str
author_url: str
author_icon_url: str
image_url: str
thumbnail_url: str
footer_text: str
footer_icon_url: str
title: str = ""
description: str = ""
color: str = ""
author_name: str = ""
author_url: str = ""
author_icon_url: str = ""
image_url: str = ""
thumbnail_url: str = ""
footer_text: str = ""
footer_icon_url: str = ""


def try_to_replace(custom_message: str, template: str, replace_with: str) -> str:
@@ -59,7 +66,7 @@ def replace_tags_in_text_message(entry: Entry) -> str:

summary: str = entry.summary or ""

first_image = get_first_image(summary, content)
first_image: str = get_first_image(summary, content)

summary = convert_html_to_md(summary)
content = convert_html_to_md(content)
@@ -102,7 +109,7 @@ def replace_tags_in_text_message(entry: Entry) -> str:
return custom_message.replace("\\n", "\n")


def get_first_image(summary, content):
def get_first_image(summary: str | None, content: str | None) -> str:
"""Get image from summary or content.

Args:
@@ -112,10 +119,25 @@ def get_first_image(summary, content):
Returns:
The first image
"""
# TODO(TheLovinator): We should find a better way to get the image.
if content and (images := BeautifulSoup(content, features="lxml").find_all("img")):
return images[0].attrs["src"]
for image in images:
if not is_url_valid(image.attrs["src"]):
logger.warning(f"Invalid URL: {image.attrs['src']}")
continue

# Genshins first image is a divider, so we ignore it.
if not image.attrs["src"].startswith("https://img-os-static.hoyolab.com/divider_config"):
return str(image.attrs["src"])
if summary and (images := BeautifulSoup(summary, features="lxml").find_all("img")):
return images[0].attrs["src"]
for image in images:
if not is_url_valid(image.attrs["src"]):
logger.warning(f"Invalid URL: {image.attrs['src']}")
continue

# Genshins first image is a divider, so we ignore it.
if not image.attrs["src"].startswith("https://img-os-static.hoyolab.com/divider_config"):
return str(image.attrs["src"])
return ""


@@ -139,42 +161,47 @@ def replace_tags_in_embed(feed: Feed, entry: Entry) -> CustomEmbed:

summary: str = entry.summary or ""

first_image = get_first_image(summary, content)
first_image: str = get_first_image(summary, content)

summary = convert_html_to_md(summary)
content = convert_html_to_md(content)

entry_text: str = content or summary
feed_added: str = feed.added.strftime("%Y-%m-%d %H:%M:%S") if feed.added else "Never"
feed_last_updated: str = feed.last_updated.strftime("%Y-%m-%d %H:%M:%S") if feed.last_updated else "Never"
feed_updated: str = feed.updated.strftime("%Y-%m-%d %H:%M:%S") if feed.updated else "Never"
entry_added: str = entry.added.strftime("%Y-%m-%d %H:%M:%S") if entry.added else "Never"
entry_published: str = entry.published.strftime("%Y-%m-%d %H:%M:%S") if entry.published else "Never"
entry_read_modified: str = entry.read_modified.strftime("%Y-%m-%d %H:%M:%S") if entry.read_modified else "Never"
entry_updated: str = entry.updated.strftime("%Y-%m-%d %H:%M:%S") if entry.updated else "Never"

list_of_replacements = [
{"{{feed_author}}": feed.author},
{"{{feed_added}}": feed.added},
{"{{feed_last_exception}}": feed.last_exception},
{"{{feed_last_updated}}": feed.last_updated},
{"{{feed_link}}": feed.link},
{"{{feed_subtitle}}": feed.subtitle},
{"{{feed_title}}": feed.title},
{"{{feed_updated}}": feed.updated},
{"{{feed_updates_enabled}}": str(feed.updates_enabled)},
{"{{feed_url}}": feed.url},
{"{{feed_user_title}}": feed.user_title},
{"{{feed_version}}": feed.version},
{"{{entry_added}}": entry.added},
{"{{entry_author}}": entry.author},
{"{{entry_content}}": content},
list_of_replacements: list[dict[str, str]] = [
{"{{feed_author}}": feed.author or ""},
{"{{feed_added}}": feed_added or ""},
{"{{feed_last_updated}}": feed_last_updated or ""},
{"{{feed_link}}": feed.link or ""},
{"{{feed_subtitle}}": feed.subtitle or ""},
{"{{feed_title}}": feed.title or ""},
{"{{feed_updated}}": feed_updated or ""},
{"{{feed_updates_enabled}}": "True" if feed.updates_enabled else "False"},
{"{{feed_url}}": feed.url or ""},
{"{{feed_user_title}}": feed.user_title or ""},
{"{{feed_version}}": feed.version or ""},
{"{{entry_added}}": entry_added or ""},
{"{{entry_author}}": entry.author or ""},
{"{{entry_content}}": content or ""},
{"{{entry_content_raw}}": entry.content[0].value if entry.content else ""},
{"{{entry_id}}": entry.id},
{"{{entry_important}}": str(entry.important)},
{"{{entry_link}}": entry.link},
{"{{entry_published}}": entry.published},
{"{{entry_read}}": str(entry.read)},
{"{{entry_read_modified}}": entry.read_modified},
{"{{entry_summary}}": summary},
{"{{entry_important}}": "True" if entry.important else "False"},
{"{{entry_link}}": entry.link or ""},
{"{{entry_published}}": entry_published},
{"{{entry_read}}": "True" if entry.read else "False"},
{"{{entry_read_modified}}": entry_read_modified or ""},
{"{{entry_summary}}": summary or ""},
{"{{entry_summary_raw}}": entry.summary or ""},
{"{{entry_title}}": entry.title},
{"{{entry_text}}": entry_text},
{"{{entry_updated}}": entry.updated},
{"{{image_1}}": first_image},
{"{{entry_text}}": content or summary or ""},
{"{{entry_title}}": entry.title or ""},
{"{{entry_updated}}": entry_updated or ""},
{"{{image_1}}": first_image or ""},
]

for replacement in list_of_replacements:
@@ -246,9 +273,10 @@ def get_embed(custom_reader: Reader, feed: Feed) -> CustomEmbed:
Returns:
Returns the contents from the embed tag.
"""
if embed := custom_reader.get_tag(feed, "embed", ""):
if type(embed) != str:
return get_embed_data(embed)
embed: str | JSONType = custom_reader.get_tag(feed, "embed", "")
if embed:
if not isinstance(embed, str):
return get_embed_data(embed) # type: ignore
embed_data: dict[str, str | int] = json.loads(embed)
return get_embed_data(embed_data)

@@ -266,7 +294,7 @@ def get_embed(custom_reader: Reader, feed: Feed) -> CustomEmbed:
)


def get_embed_data(embed_data) -> CustomEmbed:
def get_embed_data(embed_data: dict[str, str | int]) -> CustomEmbed:
"""Get embed data from embed_data.

Args:
@@ -275,16 +303,16 @@ def get_embed_data(embed_data) -> CustomEmbed:
Returns:
Returns the embed data.
"""
title: str = embed_data.get("title", "")
description: str = embed_data.get("description", "")
color: str = embed_data.get("color", "")
author_name: str = embed_data.get("author_name", "")
author_url: str = embed_data.get("author_url", "")
author_icon_url: str = embed_data.get("author_icon_url", "")
image_url: str = embed_data.get("image_url", "")
thumbnail_url: str = embed_data.get("thumbnail_url", "")
footer_text: str = embed_data.get("footer_text", "")
footer_icon_url: str = embed_data.get("footer_icon_url", "")
title: str = str(embed_data.get("title", ""))
description: str = str(embed_data.get("description", ""))
color: str = str(embed_data.get("color", ""))
author_name: str = str(embed_data.get("author_name", ""))
author_url: str = str(embed_data.get("author_url", ""))
author_icon_url: str = str(embed_data.get("author_icon_url", ""))
image_url: str = str(embed_data.get("image_url", ""))
thumbnail_url: str = str(embed_data.get("thumbnail_url", ""))
footer_text: str = str(embed_data.get("footer_text", ""))
footer_icon_url: str = str(embed_data.get("footer_icon_url", ""))

return CustomEmbed(
title=title,
@@ -1,3 +1,8 @@
from __future__ import annotations

import datetime
import pprint
import textwrap
from typing import TYPE_CHECKING

from discord_webhook import DiscordEmbed, DiscordWebhook
@@ -7,7 +12,8 @@ from reader import Entry, Feed, FeedExistsError, Reader, TagNotFoundError
from discord_rss_bot import custom_message
from discord_rss_bot.filter.blacklist import should_be_skipped
from discord_rss_bot.filter.whitelist import has_white_tags, should_be_sent
from discord_rss_bot.settings import default_custom_message, get_reader
from discord_rss_bot.is_url_valid import is_url_valid
from discord_rss_bot.settings import default_custom_message, get_reader, logger

if TYPE_CHECKING:
from collections.abc import Iterable
@@ -38,7 +44,10 @@ def send_entry_to_discord(entry: Entry, custom_reader: Reader | None = None) ->
if custom_message.get_custom_message(reader, entry.feed) != "": # noqa: PLC1901
webhook_message = custom_message.replace_tags_in_text_message(entry=entry)
else:
webhook_message: str = default_custom_message
webhook_message: str = str(default_custom_message)

if not webhook_message:
webhook_message = "No message found."

# Create the webhook.
if bool(reader.get_tag(entry.feed, "should_send_embed")):
@@ -47,7 +56,10 @@ def send_entry_to_discord(entry: Entry, custom_reader: Reader | None = None) ->
webhook: DiscordWebhook = DiscordWebhook(url=webhook_url, content=webhook_message, rate_limit_retry=True)

response: Response = webhook.execute()
return None if response.ok else f"Error sending entry to Discord: {response.text}"
if response.status_code not in {200, 204}:
logger.error("Error sending entry to Discord: %s\n%s", response.text, pprint.pformat(webhook.json))
return f"Error sending entry to Discord: {response.text}"
return None


def create_embed_webhook(webhook_url: str, entry: Entry) -> DiscordWebhook:
@@ -68,42 +80,57 @@ def create_embed_webhook(webhook_url: str, entry: Entry) -> DiscordWebhook:

discord_embed: DiscordEmbed = DiscordEmbed()

if custom_embed.title:
discord_embed.set_title(custom_embed.title)
if custom_embed.description:
discord_embed.set_description(custom_embed.description)
if custom_embed.color and type(custom_embed.color) == str and custom_embed.color.startswith("#"):
custom_embed.color = custom_embed.color[1:]
discord_embed.set_color(int(custom_embed.color, 16))
if custom_embed.author_name and not custom_embed.author_url and not custom_embed.author_icon_url:
embed_title: str = textwrap.shorten(custom_embed.title, width=200, placeholder="...")
discord_embed.set_title(embed_title) if embed_title else None

webhook_message: str = textwrap.shorten(custom_embed.description, width=2000, placeholder="...")
discord_embed.set_description(webhook_message) if webhook_message else None

custom_embed_author_url: str | None = custom_embed.author_url
if not is_url_valid(custom_embed_author_url):
custom_embed_author_url = None

custom_embed_color: str | None = custom_embed.color or None
if custom_embed_color and custom_embed_color.startswith("#"):
custom_embed_color = custom_embed_color[1:]
discord_embed.set_color(int(custom_embed_color, 16))

if custom_embed.author_name and not custom_embed_author_url and not custom_embed.author_icon_url:
discord_embed.set_author(name=custom_embed.author_name)
if custom_embed.author_name and custom_embed.author_url and not custom_embed.author_icon_url:
discord_embed.set_author(name=custom_embed.author_name, url=custom_embed.author_url)
if custom_embed.author_name and not custom_embed.author_url and custom_embed.author_icon_url:

if custom_embed.author_name and custom_embed_author_url and not custom_embed.author_icon_url:
discord_embed.set_author(name=custom_embed.author_name, url=custom_embed_author_url)

if custom_embed.author_name and not custom_embed_author_url and custom_embed.author_icon_url:
discord_embed.set_author(name=custom_embed.author_name, icon_url=custom_embed.author_icon_url)
if custom_embed.author_name and custom_embed.author_url and custom_embed.author_icon_url:

if custom_embed.author_name and custom_embed_author_url and custom_embed.author_icon_url:
discord_embed.set_author(
name=custom_embed.author_name,
url=custom_embed.author_url,
url=custom_embed_author_url,
icon_url=custom_embed.author_icon_url,
)

if custom_embed.thumbnail_url:
discord_embed.set_thumbnail(url=custom_embed.thumbnail_url)

if custom_embed.image_url:
discord_embed.set_image(url=custom_embed.image_url)

if custom_embed.footer_text:
discord_embed.set_footer(text=custom_embed.footer_text)

if custom_embed.footer_icon_url and custom_embed.footer_text:
discord_embed.set_footer(text=custom_embed.footer_text, icon_url=custom_embed.footer_icon_url)

if custom_embed.footer_icon_url and not custom_embed.footer_text:
discord_embed.set_footer(text="-", icon_url=custom_embed.footer_icon_url)

webhook.add_embed(discord_embed)

return webhook


def send_to_discord(custom_reader: Reader | None = None, feed: Feed | None = None, do_once: bool = False) -> None:
def send_to_discord(custom_reader: Reader | None = None, feed: Feed | None = None, *, do_once: bool = False) -> None: # noqa: PLR0912
"""Send entries to Discord.

If response was not ok, we will log the error and mark the entry as unread, so it will be sent again next time.
@@ -125,6 +152,11 @@ def send_to_discord(custom_reader: Reader | None = None, feed: Feed | None = Non
# Loop through the unread entries.
entries: Iterable[Entry] = reader.get_entries(feed=feed, read=False)
for entry in entries:
if entry.added < datetime.datetime.now(tz=entry.added.tzinfo) - datetime.timedelta(days=1):
logger.info("Entry is older than 24 hours: %s from %s", entry.id, entry.feed.url)
reader.set_entry_read(entry, True)
continue

# Set the webhook to read, so we don't send it again.
reader.set_entry_read(entry, True)

@@ -138,10 +170,13 @@ def send_to_discord(custom_reader: Reader | None = None, feed: Feed | None = Non
else:
# If the user has set the custom message to an empty string, we will use the default message, otherwise we
# will use the custom message.
if custom_message.get_custom_message(reader, entry.feed) != "":
if custom_message.get_custom_message(reader, entry.feed) != "": # noqa: PLC1901
webhook_message = custom_message.replace_tags_in_text_message(entry)
else:
webhook_message: str = default_custom_message
webhook_message: str = str(default_custom_message)

# Truncate the webhook_message to 2000 characters
webhook_message = textwrap.shorten(webhook_message, width=2000, placeholder="...")

# Create the webhook.
webhook: DiscordWebhook = DiscordWebhook(url=webhook_url, content=webhook_message, rate_limit_retry=True)
@@ -150,25 +185,30 @@ def send_to_discord(custom_reader: Reader | None = None, feed: Feed | None = Non
if has_white_tags(reader, entry.feed):
if should_be_sent(reader, entry):
response: Response = webhook.execute()
if response.status_code not in {200, 204}:
logger.error("Error sending entry to Discord: %s\n%s", response.text, pprint.pformat(webhook.json))

reader.set_entry_read(entry, True)
if not response.ok:
reader.set_entry_read(entry, False)
else:
return
reader.set_entry_read(entry, True)
continue

# Check if the entry is blacklisted, if it is, mark it as read and continue.
if should_be_skipped(reader, entry):
logger.info("Entry was blacklisted: %s", entry.id)
reader.set_entry_read(entry, True)
continue

# It was not blacklisted, and not forced through whitelist, so we will send it to Discord.
response: Response = webhook.execute()
if not response.ok:
reader.set_entry_read(entry, False)
if response.status_code not in {200, 204}:
logger.error("Error sending entry to Discord: %s\n%s", response.text, pprint.pformat(webhook.json))
reader.set_entry_read(entry, True)
return

# If we only want to send one entry, we will break the loop. This is used when testing this function.
if do_once:
logger.info("Sent one entry to Discord.")
break

# Update the search index.
@@ -196,11 +236,10 @@ def create_feed(reader: Reader, feed_url: str, webhook_dropdown: str) -> None:
break

if not webhook_url:
# TODO: Show this error on the page.
raise HTTPException(status_code=404, detail="Webhook not found")

try:
# TODO: Check if the feed is valid
# TODO(TheLovinator): Check if the feed is valid
reader.add_feed(clean_feed_url)
except FeedExistsError:
# Add the webhook to an already added feed if it doesn't have a webhook instead of trying to create a new.
@@ -217,7 +256,7 @@ def create_feed(reader: Reader, feed_url: str, webhook_dropdown: str) -> None:
reader.set_entry_read(entry, True)

if not default_custom_message:
# TODO: Show this error on the page.
# TODO(TheLovinator): Show this error on the page.
raise HTTPException(status_code=404, detail="Default custom message couldn't be found.")

# This is the webhook that will be used to send the feed to Discord.
@@ -40,7 +40,7 @@ def should_be_skipped(custom_reader: Reader, entry: Entry) -> bool:
blacklist_summary: str = str(custom_reader.get_tag(feed, "blacklist_summary", ""))
blacklist_content: str = str(custom_reader.get_tag(feed, "blacklist_content", ""))
blacklist_author: str = str(custom_reader.get_tag(feed, "blacklist_author", ""))
# TODO: Also add support for entry_text and more.
# TODO(TheLovinator): Also add support for entry_text and more.

if entry.title and blacklist_title and is_word_in_text(blacklist_title, entry.title):
return True
@@ -1,3 +1,5 @@
from __future__ import annotations

import re
@@ -9,7 +9,7 @@ def healthcheck() -> None:
sys.exit(0): success - the container is healthy and ready for use.
sys.exit(1): unhealthy - the container is not working correctly.
"""
# TODO: We should check more than just that the website is up.
# TODO(TheLovinator): We should check more than just that the website is up.
try:
r: requests.Response = requests.get(url="http://localhost:5000", timeout=5)
if r.ok:
17 discord_rss_bot/is_url_valid.py Normal file
@@ -0,0 +1,17 @@
from urllib.parse import ParseResult, urlparse


def is_url_valid(url: str) -> bool:
"""Check if a URL is valid.

Args:
url: The URL to check.

Returns:
bool: True if the URL is valid, False otherwise.
"""
try:
result: ParseResult = urlparse(url)
return all([result.scheme, result.netloc])
except ValueError:
return False
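A quick sanity check of the new helper (a minimal sketch, not part of the commit; it only assumes the module path shown in the diff above):

from discord_rss_bot.is_url_valid import is_url_valid

# Only strings with both a scheme and a network location pass the all([scheme, netloc]) check.
print(is_url_valid("https://img-os-static.hoyolab.com/divider_config/divider.png"))  # True
print(is_url_valid("not-a-url"))  # False: urlparse() finds no scheme or netloc
print(is_url_valid("ftp://example.com/file"))  # True: any scheme with a host counts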
@@ -1,20 +1,24 @@
from __future__ import annotations

import json
import typing
import urllib.parse
from collections.abc import Iterable
from contextlib import asynccontextmanager
from dataclasses import dataclass
from datetime import datetime, timezone
from functools import lru_cache
from typing import cast
from typing import TYPE_CHECKING, cast

import httpx
import uvicorn
from apscheduler.schedulers.background import BackgroundScheduler
from apscheduler.schedulers.asyncio import AsyncIOScheduler
from fastapi import FastAPI, Form, HTTPException, Request
from fastapi.responses import HTMLResponse
from fastapi.staticfiles import StaticFiles
from fastapi.templating import Jinja2Templates
from httpx import Response
from reader import Entry, Feed, FeedNotFoundError, Reader, TagNotFoundError
from reader.types import JSONType
from starlette.responses import RedirectResponse

from discord_rss_bot import settings
@@ -38,11 +42,32 @@ from discord_rss_bot.search import create_html_for_search_results
from discord_rss_bot.settings import get_reader
from discord_rss_bot.webhook import add_webhook, remove_webhook

if TYPE_CHECKING:
from collections.abc import Iterable


reader: Reader = get_reader()


@asynccontextmanager
async def lifespan(app: FastAPI) -> typing.AsyncGenerator[None, None]:
"""This is needed for the ASGI server to run."""
add_missing_tags(reader=reader)
scheduler: AsyncIOScheduler = AsyncIOScheduler()

# Update all feeds every 15 minutes.
# TODO(TheLovinator): Make this configurable.
scheduler.add_job(send_to_discord, "interval", minutes=15, next_run_time=datetime.now(tz=timezone.utc))
scheduler.start()
yield
reader.close()
scheduler.shutdown(wait=True)


app: FastAPI = FastAPI()
app.mount("/static", StaticFiles(directory="discord_rss_bot/static"), name="static")
templates: Jinja2Templates = Jinja2Templates(directory="discord_rss_bot/templates")

reader: Reader = get_reader()

# Add the filters to the Jinja2 environment so they can be used in html templates.
templates.env.filters["encode_url"] = encode_url
@@ -70,7 +95,7 @@ async def post_delete_webhook(webhook_url: str = Form()) -> RedirectResponse:
Args:
webhook_url: The url of the webhook.
"""
# TODO: Check if the webhook is in use by any feeds before deleting it.
# TODO(TheLovinator): Check if the webhook is in use by any feeds before deleting it.
remove_webhook(reader, webhook_url)
return RedirectResponse(url="/", status_code=303)

@@ -131,19 +156,19 @@ async def post_set_whitelist(
"""
clean_feed_url: str = feed_url.strip()
if whitelist_title:
reader.set_tag(clean_feed_url, "whitelist_title", whitelist_title) # type: ignore
reader.set_tag(clean_feed_url, "whitelist_title", whitelist_title) # type: ignore[call-overload]
if whitelist_summary:
reader.set_tag(clean_feed_url, "whitelist_summary", whitelist_summary) # type: ignore
reader.set_tag(clean_feed_url, "whitelist_summary", whitelist_summary) # type: ignore[call-overload]
if whitelist_content:
reader.set_tag(clean_feed_url, "whitelist_content", whitelist_content) # type: ignore
reader.set_tag(clean_feed_url, "whitelist_content", whitelist_content) # type: ignore[call-overload]
if whitelist_author:
reader.set_tag(clean_feed_url, "whitelist_author", whitelist_author) # type: ignore
reader.set_tag(clean_feed_url, "whitelist_author", whitelist_author) # type: ignore[call-overload]

return RedirectResponse(url=f"/feed/?feed_url={urllib.parse.quote(clean_feed_url)}", status_code=303)


@app.get("/whitelist", response_class=HTMLResponse)
async def get_whitelist(feed_url: str, request: Request): # noqa: ANN201
async def get_whitelist(feed_url: str, request: Request):
"""Get the whitelist.

Args:
@@ -167,7 +192,7 @@ async def get_whitelist(feed_url: str, request: Request): # noqa: ANN201
"whitelist_content": whitelist_content,
"whitelist_author": whitelist_author,
}
return templates.TemplateResponse("whitelist.html", context)
return templates.TemplateResponse(request=request, name="whitelist.html", context=context)


@app.post("/blacklist")
@@ -192,19 +217,28 @@ async def post_set_blacklist(
"""
clean_feed_url: str = feed_url.strip()
if blacklist_title:
reader.set_tag(clean_feed_url, "blacklist_title", blacklist_title) # type: ignore
reader.set_tag(clean_feed_url, "blacklist_title", blacklist_title) # type: ignore[call-overload]
if blacklist_summary:
reader.set_tag(clean_feed_url, "blacklist_summary", blacklist_summary) # type: ignore
reader.set_tag(clean_feed_url, "blacklist_summary", blacklist_summary) # type: ignore[call-overload]
if blacklist_content:
reader.set_tag(clean_feed_url, "blacklist_content", blacklist_content) # type: ignore
reader.set_tag(clean_feed_url, "blacklist_content", blacklist_content) # type: ignore[call-overload]
if blacklist_author:
reader.set_tag(clean_feed_url, "blacklist_author", blacklist_author) # type: ignore
reader.set_tag(clean_feed_url, "blacklist_author", blacklist_author) # type: ignore[call-overload]

return RedirectResponse(url=f"/feed/?feed_url={urllib.parse.quote(clean_feed_url)}", status_code=303)


@app.get("/blacklist", response_class=HTMLResponse)
async def get_blacklist(feed_url: str, request: Request): # noqa: ANN201
async def get_blacklist(feed_url: str, request: Request):
"""Get the blacklist.

Args:
feed_url: What feed we should get the blacklist for.
request: The request object.

Returns:
HTMLResponse: The blacklist page.
"""
feed: Feed = reader.get_feed(urllib.parse.unquote(feed_url))

# Get previous data, this is used when creating the form.
@@ -221,7 +255,7 @@ async def get_blacklist(feed_url: str, request: Request): # noqa: ANN201
"blacklist_content": blacklist_content,
"blacklist_author": blacklist_author,
}
return templates.TemplateResponse("blacklist.html", context)
return templates.TemplateResponse(request=request, name="blacklist.html", context=context)


@app.post("/custom")
@@ -232,17 +266,23 @@ async def post_set_custom(custom_message: str = Form(""), feed_url: str = Form()
custom_message: The custom message.
feed_url: The feed we should set the custom message for.
"""
if custom_message:
reader.set_tag(feed_url, "custom_message", custom_message.strip()) # type: ignore
our_custom_message: JSONType | str = custom_message.strip()
our_custom_message = typing.cast(JSONType, our_custom_message)

default_custom_message: JSONType | str = settings.default_custom_message
default_custom_message = typing.cast(JSONType, default_custom_message)

if our_custom_message:
reader.set_tag(feed_url, "custom_message", our_custom_message)
else:
reader.set_tag(feed_url, "custom_message", settings.default_custom_message) # type: ignore
reader.set_tag(feed_url, "custom_message", default_custom_message)

clean_feed_url: str = feed_url.strip()
return RedirectResponse(url=f"/feed/?feed_url={urllib.parse.quote(clean_feed_url)}", status_code=303)


@app.get("/custom", response_class=HTMLResponse)
async def get_custom(feed_url: str, request: Request): # noqa: ANN201
async def get_custom(feed_url: str, request: Request):
"""Get the custom message. This is used when sending the message to Discord.

Args:
@@ -261,11 +301,11 @@ async def get_custom(feed_url: str, request: Request): # noqa: ANN201
for entry in reader.get_entries(feed=feed, limit=1):
context["entry"] = entry

return templates.TemplateResponse("custom.html", context)
return templates.TemplateResponse(request=request, name="custom.html", context=context)


@app.get("/embed", response_class=HTMLResponse)
async def get_embed_page(feed_url: str, request: Request): # noqa: ANN201
async def get_embed_page(feed_url: str, request: Request):
"""Get the custom message. This is used when sending the message to Discord.

Args:
@@ -297,11 +337,11 @@ async def get_embed_page(feed_url: str, request: Request): # noqa: ANN201
for entry in reader.get_entries(feed=feed, limit=1):
# Append to context.
context["entry"] = entry
return templates.TemplateResponse("embed.html", context)
return templates.TemplateResponse(request=request, name="embed.html", context=context)


@app.post("/embed", response_class=HTMLResponse)
async def post_embed( # noqa: PLR0913
async def post_embed( # noqa: PLR0913, PLR0917
feed_url: str = Form(),
title: str = Form(""),
description: str = Form(""),
@@ -385,22 +425,23 @@ async def post_use_text(feed_url: str = Form()) -> RedirectResponse:


@app.get("/add", response_class=HTMLResponse)
def get_add(request: Request): # noqa: ANN201
def get_add(request: Request):
"""Page for adding a new feed."""
context = {
"request": request,
"webhooks": reader.get_tag((), "webhooks", []),
}
return templates.TemplateResponse("add.html", context)
return templates.TemplateResponse(request=request, name="add.html", context=context)


@app.get("/feed", response_class=HTMLResponse)
async def get_feed(feed_url: str, request: Request): # noqa: ANN201
async def get_feed(feed_url: str, request: Request, starting_after: str | None = None):
"""Get a feed by URL.

Args:
feed_url: The feed to add.
request: The request object.
starting_after: The entry to start after. Used for pagination.

Returns:
HTMLResponse: The feed page.
@@ -410,7 +451,7 @@ async def get_feed(feed_url: str, request: Request): # noqa: ANN201
feed: Feed = reader.get_feed(clean_feed_url)

# Get entries from the feed.
entries: Iterable[Entry] = reader.get_entries(feed=clean_feed_url)
entries: typing.Iterable[Entry] = reader.get_entries(feed=clean_feed_url, limit=10)

# Create the html for the entries.
html: str = create_html_for_feed(entries)
@@ -428,8 +469,49 @@ async def get_feed(feed_url: str, request: Request): # noqa: ANN201
"feed_counts": reader.get_feed_counts(feed=clean_feed_url),
"html": html,
"should_send_embed": should_send_embed,
"show_more_button": True,
}
return templates.TemplateResponse("feed.html", context)
return templates.TemplateResponse(request=request, name="feed.html", context=context)


@app.get("/feed_more", response_class=HTMLResponse)
async def get_all_entries(feed_url: str, request: Request):
"""Get a feed by URL and show more entries.

Args:
feed_url: The feed to add.
request: The request object.
starting_after: The entry to start after. Used for pagination.

Returns:
HTMLResponse: The feed page.
"""
clean_feed_url: str = urllib.parse.unquote(feed_url.strip())

feed: Feed = reader.get_feed(clean_feed_url)

# Get entries from the feed.
entries: typing.Iterable[Entry] = reader.get_entries(feed=clean_feed_url, limit=200)

# Create the html for the entries.
html: str = create_html_for_feed(entries)

try:
should_send_embed: bool = bool(reader.get_tag(feed, "should_send_embed"))
except TagNotFoundError:
add_missing_tags(reader)
should_send_embed: bool = bool(reader.get_tag(feed, "should_send_embed"))

context = {
"request": request,
"feed": feed,
"entries": entries,
"feed_counts": reader.get_feed_counts(feed=clean_feed_url),
"html": html,
"should_send_embed": should_send_embed,
"show_more_button": False,
}
return templates.TemplateResponse(request=request, name="feed.html", context=context)


def create_html_for_feed(entries: Iterable[Entry]) -> str:
@@ -478,7 +560,7 @@ def create_html_for_feed(entries: Iterable[Entry]) -> str:


@app.get("/add_webhook", response_class=HTMLResponse)
async def get_add_webhook(request: Request): # noqa: ANN201
async def get_add_webhook(request: Request):
"""Page for adding a new webhook.

Args:
@@ -487,7 +569,7 @@ async def get_add_webhook(request: Request): # noqa: ANN201
Returns:
HTMLResponse: The add webhook page.
"""
return templates.TemplateResponse("add_webhook.html", {"request": request})
return templates.TemplateResponse(request=request, name="add_webhook.html", context={"request": request})


@dataclass()
@@ -533,7 +615,7 @@ def get_data_from_hook_url(hook_name: str, hook_url: str) -> WebhookInfo:


@app.get("/webhooks", response_class=HTMLResponse)
async def get_webhooks(request: Request): # noqa: ANN201
async def get_webhooks(request: Request):
"""Page for adding a new webhook.

Args:
@@ -549,11 +631,11 @@ async def get_webhooks(request: Request): # noqa: ANN201
hooks_with_data.append(our_hook)

context = {"request": request, "hooks_with_data": hooks_with_data}
return templates.TemplateResponse("webhooks.html", context)
return templates.TemplateResponse(request=request, name="webhooks.html", context=context)


@app.get("/", response_class=HTMLResponse)
def get_index(request: Request): # noqa: ANN201
def get_index(request: Request):
"""This is the root of the website.

Args:
@@ -562,10 +644,10 @@ def get_index(request: Request): # noqa: ANN201
Returns:
HTMLResponse: The index page.
"""
return templates.TemplateResponse("index.html", make_context_index(request))
return templates.TemplateResponse(request=request, name="index.html", context=make_context_index(request))


def make_context_index(request: Request): # noqa: ANN201
def make_context_index(request: Request):
"""Create the needed context for the index page.

Args:
@@ -605,7 +687,7 @@ def make_context_index(request: Request): # noqa: ANN201


@app.post("/remove", response_class=HTMLResponse)
async def remove_feed(feed_url: str = Form()): # noqa: ANN201
async def remove_feed(feed_url: str = Form()):
"""Get a feed by URL.

Args:
@@ -623,7 +705,7 @@ async def remove_feed(feed_url: str = Form()): # noqa: ANN201


@app.get("/search", response_class=HTMLResponse)
async def search(request: Request, query: str): # noqa: ANN201
async def search(request: Request, query: str):
"""Get entries matching a full-text search query.

Args:
@@ -641,11 +723,11 @@ async def search(request: Request, query: str): # noqa: ANN201
"query": query,
"search_amount": reader.search_entry_counts(query),
}
return templates.TemplateResponse("search.html", context)
return templates.TemplateResponse(request=request, name="search.html", context=context)


@app.get("/post_entry", response_class=HTMLResponse)
async def post_entry(entry_id: str): # noqa: ANN201
async def post_entry(entry_id: str):
"""Send single entry to Discord.

Args:
@@ -668,7 +750,7 @@ async def post_entry(entry_id: str): # noqa: ANN201


@app.post("/modify_webhook", response_class=HTMLResponse)
def modify_webhook(old_hook: str = Form(), new_hook: str = Form()): # noqa: ANN201
def modify_webhook(old_hook: str = Form(), new_hook: str = Form()):
"""Modify a webhook.

Args:
@@ -682,7 +764,7 @@ def modify_webhook(old_hook: str = Form(), new_hook: str = Form()): # noqa: ANN
webhooks = list(reader.get_tag((), "webhooks", []))

# Webhooks are stored as a list of dictionaries.
# Example: [{"name": "webhook_name", "url": "webhook_url"}] # noqa: ERA001
# Example: [{"name": "webhook_name", "url": "webhook_url"}]
webhooks = cast(list[dict[str, str]], webhooks)

for hook in webhooks:
@@ -712,24 +794,8 @@ def modify_webhook(old_hook: str = Form(), new_hook: str = Form()): # noqa: ANN
return RedirectResponse(url="/webhooks", status_code=303)


@app.on_event("startup")
def startup() -> None:
"""This is called when the server starts.

It adds missing tags and starts the scheduler.
"""
add_missing_tags(reader=reader)

scheduler: BackgroundScheduler = BackgroundScheduler()

# Update all feeds every 15 minutes.
# TODO: Make this configurable.
scheduler.add_job(send_to_discord, "interval", minutes=15, next_run_time=datetime.now(tz=timezone.utc))
scheduler.start()


if __name__ == "__main__":
# TODO: Make this configurable.
# TODO(TheLovinator): Make this configurable.
uvicorn.run(
"main:app",
log_level="info",
@@ -35,6 +35,7 @@ def convert_html_to_md(html: str) -> str:
link.decompose()
else:
link_text: str = link.text or link.get("href")
link_text = link_text.replace("http://", "").replace("https://", "")
link.replace_with(f"[{link_text}]({link.get('href')})")

for strikethrough in soup.find_all("s") + soup.find_all("del") + soup.find_all("strike"):
@@ -4,60 +4,88 @@ from discord_rss_bot.settings import default_custom_embed, default_custom_messag


def add_custom_message(reader: Reader, feed: Feed) -> None:
"""Add the custom message tag to the feed if it doesn't exist.

Args:
reader: What Reader to use.
feed: The feed to add the tag to.
"""
try:
reader.get_tag(feed, "custom_message")
except TagNotFoundError:
print(f"Adding custom_message tag to '{feed.url}'")
reader.set_tag(feed.url, "custom_message", default_custom_message) # type: ignore
reader.set_tag(feed.url, "has_custom_message", True) # type: ignore


def add_has_custom_message(reader: Reader, feed: Feed) -> None:
"""Add the has_custom_message tag to the feed if it doesn't exist.

Args:
reader: What Reader to use.
feed: The feed to add the tag to.
"""
try:
reader.get_tag(feed, "has_custom_message")
except TagNotFoundError:
if reader.get_tag(feed, "custom_message") == default_custom_message:
print(f"Setting has_custom_message tag to False for '{feed.url}'")
reader.set_tag(feed.url, "has_custom_message", False) # type: ignore
else:
print(f"Setting has_custom_message tag to True for '{feed.url}'")
reader.set_tag(feed.url, "has_custom_message", True) # type: ignore


def add_if_embed(reader: Reader, feed: Feed) -> None:
"""Add the if_embed tag to the feed if it doesn't exist.

Args:
reader: What Reader to use.
feed: The feed to add the tag to.
"""
try:
reader.get_tag(feed, "if_embed")
except TagNotFoundError:
print(f"Setting if_embed tag to True for '{feed.url}'")
reader.set_tag(feed.url, "if_embed", True) # type: ignore


def add_custom_embed(reader: Reader, feed: Feed) -> None:
"""Add the custom embed tag to the feed if it doesn't exist.

Args:
reader: What Reader to use.
feed: The feed to add the tag to.
"""
try:
reader.get_tag(feed, "embed")
except TagNotFoundError:
print(f"Setting embed tag to default for '{feed.url}'")
reader.set_tag(feed.url, "embed", default_custom_embed) # type: ignore
reader.set_tag(feed.url, "has_custom_embed", True) # type: ignore


def add_has_custom_embed(reader: Reader, feed: Feed) -> None:
"""Add the has_custom_embed tag to the feed if it doesn't exist.

Args:
reader: What Reader to use.
feed: The feed to add the tag to.
"""
try:
reader.get_tag(feed, "has_custom_embed")
except TagNotFoundError:
if reader.get_tag(feed, "embed") == default_custom_embed:
print(f"Setting has_custom_embed tag to False for '{feed.url}'")
reader.set_tag(feed.url, "has_custom_embed", False) # type: ignore
else:
print(f"Setting has_custom_embed tag to True for '{feed.url}'")
reader.set_tag(feed.url, "has_custom_embed", True) # type: ignore


def add_should_send_embed(reader: Reader, feed: Feed) -> None:
"""Add the should_send_embed tag to the feed if it doesn't exist.

Args:
reader: What Reader to use.
feed: The feed to add the tag to.
"""
try:
reader.get_tag(feed, "should_send_embed")
except TagNotFoundError:
print(f"Setting should_send_embed tag to True for '{feed.url}'")
reader.set_tag(feed.url, "should_send_embed", True) # type: ignore
@@ -1,13 +1,15 @@
from __future__ import annotations

import urllib.parse
from typing import TYPE_CHECKING

from reader import EntrySearchResult, Feed, HighlightedString, Reader

from discord_rss_bot.settings import get_reader

if TYPE_CHECKING:
from collections.abc import Iterable

from reader import EntrySearchResult, Feed, HighlightedString, Reader


def create_html_for_search_results(query: str, custom_reader: Reader | None = None) -> str:
"""Create HTML for the search results.
@@ -19,8 +21,8 @@ def create_html_for_search_results(query: str, custom_reader: Reader | None = No
Returns:
str: The HTML.
"""
# TODO: There is a .content that also contains text, we should use that if .summary is not available.
# TODO: We should also add <span> tags to the title.
# TODO(TheLovinator): There is a .content that also contains text, we should use that if .summary is not available.
# TODO(TheLovinator): We should also add <span> tags to the title.

# Get the default reader if we didn't get a custom one.
reader: Reader = get_reader() if custom_reader is None else custom_reader
@@ -55,7 +57,7 @@ def add_span_with_slice(highlighted_string: HighlightedString) -> str:
Returns:
str: The string with added <span> tags.
"""
# TODO: We are looping through the highlights and only using the last one. We should use all of them.
# TODO(TheLovinator): We are looping through the highlights and only using the last one. We should use all of them.
before_span, span_part, after_span = "", "", ""

for txt_slice in highlighted_string.highlights:
@@ -1,15 +1,32 @@
from __future__ import annotations

import logging
import sys
import typing
from functools import lru_cache
from pathlib import Path

from platformdirs import user_data_dir
from reader import Reader, make_reader

if typing.TYPE_CHECKING:
from reader.types import JSONType

data_dir: str = user_data_dir(appname="discord_rss_bot", appauthor="TheLovinator", roaming=True, ensure_exists=True)
print(f"Data is stored in '{data_dir}'.")


# TODO: Add default things to the database and make the edible.
default_custom_message: str = "{{entry_title}}\n{{entry_link}}"
logger: logging.Logger = logging.getLogger("discord_rss_bot")
logger.setLevel(logging.DEBUG)
stream_handler = logging.StreamHandler(sys.stdout)
log_formatter = logging.Formatter(
"%(asctime)s [%(processName)s: %(process)d] [%(threadName)s: %(thread)d] [%(levelname)s] %(name)s: %(message)s",
)
stream_handler.setFormatter(log_formatter)
logger.addHandler(stream_handler)


# TODO(TheLovinator): Add default things to the database and make the edible.
default_custom_message: JSONType | str = "{{entry_title}}\n{{entry_link}}"
default_custom_embed: dict[str, str] = {
"title": "{{entry_title}}",
"description": "{{entry_text}}",
6 discord_rss_bot/static/bootstrap.min.css vendored Normal file
File diff suppressed because one or more lines are too long
7 discord_rss_bot/static/bootstrap.min.js vendored Normal file
File diff suppressed because one or more lines are too long
@@ -5,3 +5,11 @@ body {
.border {
background: #161616;
}

.text-muted {
color: #888888 !important;
}

.form-text {
color: #888888;
}
@@ -7,10 +7,7 @@
content="Stay updated with the latest news and events with our easy-to-use RSS bot. Never miss a message or announcement again with real-time notifications directly to your Discord server." />
<meta name="keywords"
content="discord, rss, bot, notifications, announcements, updates, real-time, server, messages, news, events, feed." />
<link href="https://cdn.jsdelivr.net/npm/bootstrap@5.2.3/dist/css/bootstrap.min.css"
rel="stylesheet"
integrity="sha384-rbsA2VBKQhggwzxH7pPCaAqO46MgnOM80zW1RWuH61DGLwZJEdK2Kadq2F9CUG65"
crossorigin="anonymous" />
<link href="/static/bootstrap.min.css" rel="stylesheet" />
<link href="/static/styles.css" rel="stylesheet" />
<link rel="icon" href="/static/favicon.ico" type="image/x-icon" />
<title>discord-rss-bot
@@ -46,7 +43,6 @@
</div>
</div>
</div>
<script src="https://cdn.jsdelivr.net/npm/bootstrap@5.2.3/dist/js/bootstrap.bundle.min.js" integrity="sha384-kenU1KFdBIe4zVF0s0G1M5b4hcpxyD9F7jL+jjXkk+Q2h455rYXK/7HAuoJl+0I4" crossorigin="anonymous">
</script>
<script src="/static/bootstrap.min.js" defer></script>
</body>
</html>
@ -61,4 +61,8 @@
<pre>
{{ html|safe }}
</pre>
{% if show_more_button %}
<a class="btn btn-dark"
href="/feed_more?feed_url={{ feed.url|encode_url }}">Show more (Note: This view is not optimized at all, so be ready to wait a while)</a>
{% endif %}
{% endblock content %}
@ -10,19 +10,6 @@
<br />
{% for hook in hooks_with_data %}
<div class="p-2 border border-dark text-muted">
{% if hook.avatar is not none %}
<img src="https://cdn.discordapp.com/avatars/{{ hook.id }}/{{ hook.avatar }}.webp"
class="img-thumbnail"
height="128"
width="128"
alt="Webhook avatar" />
{% else %}
<img src="https://cdn.discordapp.com/embed/avatars/{{ hook.avatar_mod }}.png"
class="img-thumbnail"
height="128"
width="128"
alt="Default Discord avatar" />
{% endif %}
<h3>{{ hook.custom_name }}</h3>
<li>
<strong>Name</strong>: {{ hook.name }}
@ -21,7 +21,7 @@ def add_webhook(reader: Reader, webhook_name: str, webhook_url: str) -> None:
webhooks = list(reader.get_tag((), "webhooks", []))

# Webhooks are stored as a list of dictionaries.
# Example: [{"name": "webhook_name", "url": "webhook_url"}] # noqa: ERA001
# Example: [{"name": "webhook_name", "url": "webhook_url"}]
webhooks = cast(list[dict[str, str]], webhooks)

# Only add the webhook if it doesn't already exist.
@ -35,8 +35,8 @@ def add_webhook(reader: Reader, webhook_name: str, webhook_url: str) -> None:
add_missing_tags(reader)
return

# TODO: Show this error on the page.
# TODO: Replace HTTPException with a custom exception.
# TODO(TheLovinator): Show this error on the page.
# TODO(TheLovinator): Replace HTTPException with a custom exception.
raise HTTPException(status_code=409, detail="Webhook already exists")
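The comments above document the storage format: the webhook list lives in a global reader tag as a list of {"name", "url"} dictionaries. A minimal sketch of reading and writing that tag with the same get_tag/set_tag calls used in the diff (helper names are hypothetical):

from typing import cast

from reader import Reader


def get_webhooks(reader: Reader) -> list[dict[str, str]]:
    """Return the stored webhooks, e.g. [{"name": "My hook", "url": "https://discord.com/api/webhooks/..."}]."""
    webhooks = list(reader.get_tag((), "webhooks", []))
    return cast(list[dict[str, str]], webhooks)


def save_webhooks(reader: Reader, webhooks: list[dict[str, str]]) -> None:
    """Write the webhook list back to the global tag."""
    reader.set_tag((), "webhooks", webhooks)  # type: ignore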
@ -51,12 +51,12 @@ def remove_webhook(reader: Reader, webhook_url: str) -> None:
HTTPException: If webhook could not be deleted
HTTPException: Webhook not found
"""
# TODO: Replace HTTPException with a custom exception for both of these.
# TODO(TheLovinator): Replace HTTPException with a custom exception for both of these.
# Get current webhooks from the database if they exist otherwise use an empty list.
webhooks = list(reader.get_tag((), "webhooks", []))

# Webhooks are stored as a list of dictionaries.
# Example: [{"name": "webhook_name", "url": "webhook_url"}] # noqa: ERA001
# Example: [{"name": "webhook_name", "url": "webhook_url"}]
webhooks = cast(list[dict[str, str]], webhooks)

# Only add the webhook if it doesn't already exist.
@ -72,5 +72,5 @@ def remove_webhook(reader: Reader, webhook_url: str) -> None:
reader.set_tag((), "webhooks", webhooks) # type: ignore
return

# TODO: Show this error on the page.
# TODO(TheLovinator): Show this error on the page.
raise HTTPException(status_code=404, detail="Webhook not found")
@ -4,12 +4,18 @@ services:
container_name: discord-rss-bot
expose:
- "5000:5000"
ports:
- "5000:5000"
volumes:
- /Docker/Bots/discord-rss-bot:/home/botuser/.local/share/discord_rss_bot/
# - /Docker/Bots/discord-rss-bot:/home/botuser/.local/share/discord_rss_bot/
- data:/home/botuser/.local/share/discord_rss_bot/
healthcheck:
test: [ "CMD", "poetry", "run", "python", "discord_rss_bot/healthcheck.py" ]
test: ["CMD", "python", "discord_rss_bot/healthcheck.py"]
interval: 1m
timeout: 10s
retries: 3
start_period: 5s
restart: unless-stopped

volumes:
data:
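The healthcheck now runs the script with the container's Python directly instead of through poetry run. The actual discord_rss_bot/healthcheck.py is not part of this diff; a hypothetical sketch of such a script, assuming it only needs to probe the web UI on port 5000 with httpx (already a dependency) and signal health via its exit code:

# Hypothetical healthcheck sketch; the real discord_rss_bot/healthcheck.py may differ.
import sys

import httpx


def main() -> int:
    """Exit 0 when the web UI answers on port 5000, 1 otherwise."""
    try:
        response = httpx.get("http://localhost:5000/", timeout=5)
    except httpx.HTTPError:
        return 1
    return 0 if response.is_success else 1


if __name__ == "__main__":
    sys.exit(main())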
148
poetry.lock
generated
@ -91,6 +91,17 @@ files = [
{file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"},
]

[[package]]
name = "cfgv"
version = "3.4.0"
description = "Validate configuration and produce human readable error messages."
optional = false
python-versions = ">=3.8"
files = [
{file = "cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9"},
{file = "cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560"},
]

[[package]]
name = "charset-normalizer"
version = "3.3.2"
@ -247,6 +258,17 @@ requests = ">=2.28.1,<3.0.0"
[package.extras]
async = ["httpx (>=0.23.0,<0.24.0)"]

[[package]]
name = "distlib"
version = "0.3.8"
description = "Distribution utilities"
optional = false
python-versions = "*"
files = [
{file = "distlib-0.3.8-py2.py3-none-any.whl", hash = "sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784"},
{file = "distlib-0.3.8.tar.gz", hash = "sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64"},
]

[[package]]
name = "djlint"
version = "1.34.1"
@ -314,6 +336,22 @@ files = [
[package.dependencies]
sgmllib3k = "*"

[[package]]
name = "filelock"
version = "3.14.0"
description = "A platform independent file lock."
optional = false
python-versions = ">=3.8"
files = [
{file = "filelock-3.14.0-py3-none-any.whl", hash = "sha256:43339835842f110ca7ae60f1e1c160714c5a6afd15a2873419ab185334975c0f"},
{file = "filelock-3.14.0.tar.gz", hash = "sha256:6ea72da3be9b8c82afd3edcf99f2fffbb5076335a5ae4d03248bb5b6c3eae78a"},
]

[package.extras]
docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"]
testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8.0.1)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)"]
typing = ["typing-extensions (>=4.8)"]

[[package]]
name = "h11"
version = "0.14.0"
@ -440,6 +478,20 @@ cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"]
http2 = ["h2 (>=3,<5)"]
socks = ["socksio (==1.*)"]

[[package]]
name = "identify"
version = "2.5.36"
description = "File identification library for Python"
optional = false
python-versions = ">=3.8"
files = [
{file = "identify-2.5.36-py2.py3-none-any.whl", hash = "sha256:37d93f380f4de590500d9dba7db359d0d3da95ffe7f9de1753faa159e71e7dfa"},
{file = "identify-2.5.36.tar.gz", hash = "sha256:e5e00f54165f9047fbebeb4a560f9acfb8af4c88232be60a488e9b68d122745d"},
]

[package.extras]
license = ["ukkonen"]

[[package]]
name = "idna"
version = "3.7"
@ -692,6 +744,20 @@ files = [
{file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"},
]

[[package]]
name = "nodeenv"
version = "1.8.0"
description = "Node.js virtual environment builder"
optional = false
python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*"
files = [
{file = "nodeenv-1.8.0-py2.py3-none-any.whl", hash = "sha256:df865724bb3c3adc86b3876fa209771517b0cfe596beff01a92700e0e8be4cec"},
{file = "nodeenv-1.8.0.tar.gz", hash = "sha256:d51e0c37e64fbf47d017feac3145cdbb58836d7eee8c6f6d3b6880c5456227d2"},
]

[package.dependencies]
setuptools = "*"

[[package]]
name = "packaging"
version = "24.0"
@ -744,6 +810,24 @@ files = [
dev = ["pre-commit", "tox"]
testing = ["pytest", "pytest-benchmark"]

[[package]]
name = "pre-commit"
version = "3.7.1"
description = "A framework for managing and maintaining multi-language pre-commit hooks."
optional = false
python-versions = ">=3.9"
files = [
{file = "pre_commit-3.7.1-py2.py3-none-any.whl", hash = "sha256:fae36fd1d7ad7d6a5a1c0b0d5adb2ed1a3bda5a21bf6c3e5372073d7a11cd4c5"},
{file = "pre_commit-3.7.1.tar.gz", hash = "sha256:8ca3ad567bc78a4972a3f1a477e94a79d4597e8140a6e0b651c5e33899c3654a"},
]

[package.dependencies]
cfgv = ">=2.0.0"
identify = ">=1.0.0"
nodeenv = ">=0.11.1"
pyyaml = ">=5.1"
virtualenv = ">=20.10.0"

[[package]]
name = "pydantic"
version = "2.7.1"
@ -1123,6 +1207,48 @@ urllib3 = ">=1.21.1,<3"
socks = ["PySocks (>=1.5.6,!=1.5.7)"]
use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]

[[package]]
name = "ruff"
version = "0.4.4"
description = "An extremely fast Python linter and code formatter, written in Rust."
optional = false
python-versions = ">=3.7"
files = [
{file = "ruff-0.4.4-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:29d44ef5bb6a08e235c8249294fa8d431adc1426bfda99ed493119e6f9ea1bf6"},
{file = "ruff-0.4.4-py3-none-macosx_11_0_arm64.whl", hash = "sha256:c4efe62b5bbb24178c950732ddd40712b878a9b96b1d02b0ff0b08a090cbd891"},
{file = "ruff-0.4.4-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4c8e2f1e8fc12d07ab521a9005d68a969e167b589cbcaee354cb61e9d9de9c15"},
{file = "ruff-0.4.4-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:60ed88b636a463214905c002fa3eaab19795679ed55529f91e488db3fe8976ab"},
{file = "ruff-0.4.4-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b90fc5e170fc71c712cc4d9ab0e24ea505c6a9e4ebf346787a67e691dfb72e85"},
{file = "ruff-0.4.4-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:8e7e6ebc10ef16dcdc77fd5557ee60647512b400e4a60bdc4849468f076f6eef"},
{file = "ruff-0.4.4-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b9ddb2c494fb79fc208cd15ffe08f32b7682519e067413dbaf5f4b01a6087bcd"},
{file = "ruff-0.4.4-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c51c928a14f9f0a871082603e25a1588059b7e08a920f2f9fa7157b5bf08cfe9"},
{file = "ruff-0.4.4-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b5eb0a4bfd6400b7d07c09a7725e1a98c3b838be557fee229ac0f84d9aa49c36"},
{file = "ruff-0.4.4-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:b1867ee9bf3acc21778dcb293db504692eda5f7a11a6e6cc40890182a9f9e595"},
{file = "ruff-0.4.4-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:1aecced1269481ef2894cc495647392a34b0bf3e28ff53ed95a385b13aa45768"},
{file = "ruff-0.4.4-py3-none-musllinux_1_2_i686.whl", hash = "sha256:9da73eb616b3241a307b837f32756dc20a0b07e2bcb694fec73699c93d04a69e"},
{file = "ruff-0.4.4-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:958b4ea5589706a81065e2a776237de2ecc3e763342e5cc8e02a4a4d8a5e6f95"},
{file = "ruff-0.4.4-py3-none-win32.whl", hash = "sha256:cb53473849f011bca6e754f2cdf47cafc9c4f4ff4570003a0dad0b9b6890e876"},
{file = "ruff-0.4.4-py3-none-win_amd64.whl", hash = "sha256:424e5b72597482543b684c11def82669cc6b395aa8cc69acc1858b5ef3e5daae"},
{file = "ruff-0.4.4-py3-none-win_arm64.whl", hash = "sha256:39df0537b47d3b597293edbb95baf54ff5b49589eb7ff41926d8243caa995ea6"},
{file = "ruff-0.4.4.tar.gz", hash = "sha256:f87ea42d5cdebdc6a69761a9d0bc83ae9b3b30d0ad78952005ba6568d6c022af"},
]

[[package]]
name = "setuptools"
version = "69.5.1"
description = "Easily download, build, install, upgrade, and uninstall Python packages"
optional = false
python-versions = ">=3.8"
files = [
{file = "setuptools-69.5.1-py3-none-any.whl", hash = "sha256:c636ac361bc47580504644275c9ad802c50415c7522212252c033bd15f301f32"},
{file = "setuptools-69.5.1.tar.gz", hash = "sha256:6c1fccdac05a97e598fb0ae3bbed5904ccb317337a51139dcd51453611bbb987"},
]

[package.extras]
docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"]
testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6,!=8.1.1)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"]
testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"]

[[package]]
name = "sgmllib3k"
version = "1.0.0"
@ -1339,6 +1465,26 @@ files = [
docs = ["Sphinx (>=4.1.2,<4.2.0)", "sphinx-rtd-theme (>=0.5.2,<0.6.0)", "sphinxcontrib-asyncio (>=0.3.0,<0.4.0)"]
test = ["Cython (>=0.29.36,<0.30.0)", "aiohttp (==3.9.0b0)", "aiohttp (>=3.8.1)", "flake8 (>=5.0,<6.0)", "mypy (>=0.800)", "psutil", "pyOpenSSL (>=23.0.0,<23.1.0)", "pycodestyle (>=2.9.0,<2.10.0)"]

[[package]]
name = "virtualenv"
version = "20.26.2"
description = "Virtual Python Environment builder"
optional = false
python-versions = ">=3.7"
files = [
{file = "virtualenv-20.26.2-py3-none-any.whl", hash = "sha256:a624db5e94f01ad993d476b9ee5346fdf7b9de43ccaee0e0197012dc838a0e9b"},
{file = "virtualenv-20.26.2.tar.gz", hash = "sha256:82bf0f4eebbb78d36ddaee0283d43fe5736b53880b8a8cdcd37390a07ac3741c"},
]

[package.dependencies]
distlib = ">=0.3.7,<1"
filelock = ">=3.12.2,<4"
platformdirs = ">=3.9.1,<5"

[package.extras]
docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2,!=7.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"]
test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"]

[[package]]
name = "watchfiles"
version = "0.21.0"
@ -1527,4 +1673,4 @@ watchdog = ["watchdog (>=2.3)"]
[metadata]
lock-version = "2.0"
python-versions = "^3.12"
content-hash = "b82e82a7c33893eb46dd535e8c0bb3229b2f42ad7316868de3f72c30320c7ead"
content-hash = "1a20eeb21e0dad90c4116b164c8d7a796e53b2bfad916ed494970ee84ee2de52"
130
pyproject.toml
@ -1,28 +1,30 @@
[tool.poetry]
name = "discord-rss-bot"
version = "0.2.0"
version = "1.0.0"
description = "RSS bot for Discord"
authors = ["Joakim Hellsén <tlovinator@gmail.com>"]

[tool.poetry.dependencies]
python = "^3.12"
reader = "^3.12"
discord-webhook = "^1.3.1"
platformdirs = "^3.11.0"
fastapi = "^0.110.0"
uvicorn = { extras = ["standard"], version = "^0.29.0" }
jinja2 = "^3.1.4"
apscheduler = "^3.10.4"
python-multipart = "^0.0.9"
python-dotenv = "^1.0.1"
tomlkit = "^0.12.0"
beautifulsoup4 = "^4.12.3"
lxml = "^4.9.4"
discord-webhook = "^1.3.1"
fastapi = "^0.110.0"
httpx = "^0.27.0"
jinja2 = "^3.1.4"
lxml = "^4.9.4"
platformdirs = "^3.11.0"
python-dotenv = "^1.0.1"
python-multipart = "^0.0.9"
reader = "^3.12"
tomlkit = "^0.12.0"
uvicorn = { extras = ["standard"], version = "^0.29.0" }

[tool.poetry.group.dev.dependencies]
pytest = "^7.4.4"
djlint = "^1.34.1"
pre-commit = "^3.7.1"
pytest = "^7.4.4"
ruff = "^0.4.4"

[build-system]
requires = ["poetry-core>=1.0.0"]
@ -35,80 +37,46 @@ max_line_length = 120
format_attribute_template_tags = true

[tool.ruff]
fix = true
unsafe-fixes = true
preview = true
line-length = 120
select = [
"E",
"F",
"B",
"W",
"C90",
"I",
"N",
"D",
"UP",
"YTT",
"ANN",
"S",
"BLE",
# "FBT", # Reader uses positional boolean values in its function calls
"A",
"COM",
"C4",
"DTZ",
"EM",
"EXE",
"ISC",
"ICN",
"G",
"INP",
"PIE",
"T20",
"PYI",
"PT",
"Q",
"RSE",
"RET",
"SLF",
"SIM",
"TID",
"TCH",
"ARG",
"PTH",
"ERA",
"PGH",
"PL",
"PLC",
"PLE",
"PLR",
"PLW",
"TRY",
"RUF",
]
ignore = [
"D100", # pydocstyle - missing docstring in public module
"D101", # pydocstyle - missing docstring in public class
"D102", # pydocstyle - missing docstring in public method
"D103", # pydocstyle - missing docstring in public function
"D104", # pydocstyle - missing docstring in public package
"D105", # pydocstyle - missing docstring in magic method
"D106", # pydocstyle - missing docstring in public nested class
"D107", # pydocstyle - missing docstring in __init__
"G002", # Allow % in logging
"UP031", # Allow % in logging
lint.select = ["ALL"]
lint.ignore = [
"ANN201", # Checks that public functions and methods have return type annotations.
"ARG001", # Checks for the presence of unused arguments in function definitions.
"B008", # Allow Form() as a default value
"PGH003", # Allow # type: ignore
"COM812", # Checks for the absence of trailing commas.
"CPY001", # Missing copyright notice at top of file
"D100", # Checks for undocumented public module definitions.
"D101", # Checks for undocumented public class definitions.
"D102", # Checks for undocumented public method definitions.
"D104", # Missing docstring in public package.
"D105", # Missing docstring in magic method.
"D105", # pydocstyle - missing docstring in magic method
"D106", # Checks for undocumented public class definitions, for nested classes.
"ERA001", # Found commented-out code
"FBT003", # Checks for boolean positional arguments in function calls.
"FIX002", # Line contains TODO
"G002", # Allow % in logging
"ISC001", # Checks for implicitly concatenated strings on a single line.
"PGH003", # Check for type: ignore annotations that suppress all type warnings, as opposed to targeting specific type warnings.
"PLR6301", # Checks for the presence of unused self parameter in methods definitions.
"RUF029", # Checks for functions declared async that do not await or otherwise use features requiring the function to be declared async.
"TD003", # Checks that a TODO comment is associated with a link to a relevant issue or ticket.
]

[tool.ruff.pydocstyle]
[tool.ruff.lint.pydocstyle]
convention = "google"

[tool.ruff.per-file-ignores]
"tests/*" = ["S101"]
[tool.ruff.lint.per-file-ignores]
"tests/*" = ["S101", "D103", "PLR2004"]

[tool.ruff.lint.mccabe]
max-complexity = 15 # Don't judge lol

[tool.pytest.ini_options]
addopts = "-vvvvvv --exitfirst"
filterwarnings = [
"ignore:'cgi' is deprecated and slated for removal in Python 3.13:DeprecationWarning",
"ignore:pkg_resources is deprecated as an API:DeprecationWarning",
"ignore:No parser was explicitly specified:UserWarning",
]
log_cli = true
log_cli_level = "DEBUG"
log_cli_format = "%(asctime)s [%(levelname)8s] %(message)s (%(filename)s:%(lineno)s)"
log_cli_date_format = "%Y-%m-%d %H:%M:%S"
@ -1,3 +1,5 @@
from __future__ import annotations

import tempfile
from pathlib import Path
from typing import TYPE_CHECKING
@ -40,7 +40,7 @@ def test_entry_is_whitelisted() -> None:
custom_reader.update_feed("https://lovinator.space/rss_test.xml")

# whitelist_title
custom_reader.set_tag("https://lovinator.space/rss_test.xml", "whitelist_title", "fvnnnfnfdnfdnfd") # type: ignore # noqa: E501
custom_reader.set_tag("https://lovinator.space/rss_test.xml", "whitelist_title", "fvnnnfnfdnfdnfd") # type: ignore
for entry in custom_reader.get_entries():
if entry_is_whitelisted(entry) is True:
assert entry.title == "fvnnnfnfdnfdnfd"
@ -48,7 +48,7 @@ def test_entry_is_whitelisted() -> None:
custom_reader.delete_tag("https://lovinator.space/rss_test.xml", "whitelist_title")

# whitelist_summary
custom_reader.set_tag("https://lovinator.space/rss_test.xml", "whitelist_summary", "fvnnnfnfdnfdnfd") # type: ignore # noqa: E501
custom_reader.set_tag("https://lovinator.space/rss_test.xml", "whitelist_summary", "fvnnnfnfdnfdnfd") # type: ignore
for entry in custom_reader.get_entries():
if entry_is_whitelisted(entry) is True:
assert entry.summary == "fvnnnfnfdnfdnfd"
@ -56,7 +56,7 @@ def test_entry_is_whitelisted() -> None:
custom_reader.delete_tag("https://lovinator.space/rss_test.xml", "whitelist_summary")

# whitelist_content
custom_reader.set_tag("https://lovinator.space/rss_test.xml", "whitelist_content", "fvnnnfnfdnfdnfd") # type: ignore # noqa: E501
custom_reader.set_tag("https://lovinator.space/rss_test.xml", "whitelist_content", "fvnnnfnfdnfdnfd") # type: ignore
for entry in custom_reader.get_entries():
if entry_is_whitelisted(entry) is True:
assert entry.content[0].value == "<p>ffdnfdnfdnfdnfdndfn</p>"
@ -81,7 +81,7 @@ def test_entry_is_blacklisted() -> None:
custom_reader.update_feed("https://lovinator.space/rss_test.xml")

# blacklist_title
custom_reader.set_tag("https://lovinator.space/rss_test.xml", "blacklist_title", "fvnnnfnfdnfdnfd") # type: ignore # noqa: E501
custom_reader.set_tag("https://lovinator.space/rss_test.xml", "blacklist_title", "fvnnnfnfdnfdnfd") # type: ignore
for entry in custom_reader.get_entries():
if entry_is_blacklisted(entry) is True:
assert entry.title == "fvnnnfnfdnfdnfd"
@ -89,7 +89,7 @@ def test_entry_is_blacklisted() -> None:
custom_reader.delete_tag("https://lovinator.space/rss_test.xml", "blacklist_title")

# blacklist_summary
custom_reader.set_tag("https://lovinator.space/rss_test.xml", "blacklist_summary", "fvnnnfnfdnfdnfd") # type: ignore # noqa: E501
custom_reader.set_tag("https://lovinator.space/rss_test.xml", "blacklist_summary", "fvnnnfnfdnfdnfd") # type: ignore
for entry in custom_reader.get_entries():
if entry_is_blacklisted(entry) is True:
assert entry.summary == "fvnnnfnfdnfdnfd"
@ -97,7 +97,7 @@ def test_entry_is_blacklisted() -> None:
custom_reader.delete_tag("https://lovinator.space/rss_test.xml", "blacklist_summary")

# blacklist_content
custom_reader.set_tag("https://lovinator.space/rss_test.xml", "blacklist_content", "fvnnnfnfdnfdnfd") # type: ignore # noqa: E501
custom_reader.set_tag("https://lovinator.space/rss_test.xml", "blacklist_content", "fvnnnfnfdnfdnfd") # type: ignore
for entry in custom_reader.get_entries():
if entry_is_blacklisted(entry) is True:
assert entry.content[0].value == "<p>ffdnfdnfdnfdnfdndfn</p>"
@ -1,3 +1,5 @@
from __future__ import annotations

import os
import tempfile
from pathlib import Path
@ -19,27 +19,30 @@ def test_search() -> None:
# Remove the feed if it already exists before we run the test.
feeds: Response = client.get("/")
if feed_url in feeds.text:
client.post("/remove", data={"feed_url": feed_url})
client.post("/remove", data={"feed_url": encoded_feed_url})
client.post(url="/remove", data={"feed_url": feed_url})
client.post(url="/remove", data={"feed_url": encoded_feed_url})

# Delete the webhook if it already exists before we run the test.
response: Response = client.post("/delete_webhook", data={"webhook_url": webhook_url})
response: Response = client.post(url="/delete_webhook", data={"webhook_url": webhook_url})

# Add the webhook.
response: Response = client.post("/add_webhook", data={"webhook_name": webhook_name, "webhook_url": webhook_url})
response: Response = client.post(
url="/add_webhook",
data={"webhook_name": webhook_name, "webhook_url": webhook_url},
)
assert response.status_code == 200

# Add the feed.
response: Response = client.post("/add", data={"feed_url": feed_url, "webhook_dropdown": webhook_name})
response: Response = client.post(url="/add", data={"feed_url": feed_url, "webhook_dropdown": webhook_name})
assert response.status_code == 200

# Check that the feed was added.
response = client.get("/")
response = client.get(url="/")
assert response.status_code == 200
assert feed_url in response.text

# Search for an entry.
response: Response = client.get("/search/?query=a")
response: Response = client.get(url="/search/?query=a")
assert response.status_code == 200
@ -53,14 +56,17 @@ def test_encode_url() -> None:
def test_add_webhook() -> None:
"""Test the /add_webhook page."""
# Delete the webhook if it already exists before we run the test.
response: Response = client.post("/delete_webhook", data={"webhook_url": webhook_url})
response: Response = client.post(url="/delete_webhook", data={"webhook_url": webhook_url})

# Add the webhook.
response: Response = client.post("/add_webhook", data={"webhook_name": webhook_name, "webhook_url": webhook_url})
response: Response = client.post(
url="/add_webhook",
data={"webhook_name": webhook_name, "webhook_url": webhook_url},
)
assert response.status_code == 200

# Check that the webhook was added.
response = client.get("/webhooks")
response = client.get(url="/webhooks")
assert response.status_code == 200
assert webhook_name in response.text

@ -68,17 +74,17 @@ def test_add_webhook() -> None:
def test_create_feed() -> None:
"""Test the /create_feed page."""
# Remove the feed if it already exists before we run the test.
feeds: Response = client.get("/")
feeds: Response = client.get(url="/")
if feed_url in feeds.text:
client.post("/remove", data={"feed_url": feed_url})
client.post("/remove", data={"feed_url": encoded_feed_url})
client.post(url="/remove", data={"feed_url": feed_url})
client.post(url="/remove", data={"feed_url": encoded_feed_url})

# Add the feed.
response: Response = client.post("/add", data={"feed_url": feed_url, "webhook_dropdown": webhook_name})
response: Response = client.post(url="/add", data={"feed_url": feed_url, "webhook_dropdown": webhook_name})
assert response.status_code == 200

# Check that the feed was added.
response = client.get("/")
response = client.get(url="/")
assert response.status_code == 200
assert feed_url in response.text
@ -88,11 +94,11 @@ def test_get() -> None:
# Remove the feed if it already exists before we run the test.
feeds: Response = client.get("/")
if feed_url in feeds.text:
client.post("/remove", data={"feed_url": feed_url})
client.post("/remove", data={"feed_url": encoded_feed_url})
client.post(url="/remove", data={"feed_url": feed_url})
client.post(url="/remove", data={"feed_url": encoded_feed_url})

# Add the feed.
response: Response = client.post("/add", data={"feed_url": feed_url, "webhook_dropdown": webhook_name})
response: Response = client.post(url="/add", data={"feed_url": feed_url, "webhook_dropdown": webhook_name})
assert response.status_code == 200

# Check that the feed was added.
@ -100,57 +106,60 @@ def test_get() -> None:
assert response.status_code == 200
assert feed_url in response.text

response: Response = client.get("/add")
response: Response = client.get(url="/add")
assert response.status_code == 200

response: Response = client.get("/add_webhook")
response: Response = client.get(url="/add_webhook")
assert response.status_code == 200

response: Response = client.get("/blacklist", params={"feed_url": encoded_feed_url})
response: Response = client.get(url="/blacklist", params={"feed_url": encoded_feed_url})
assert response.status_code == 200

response: Response = client.get("/custom", params={"feed_url": encoded_feed_url})
response: Response = client.get(url="/custom", params={"feed_url": encoded_feed_url})
assert response.status_code == 200

response: Response = client.get("/embed", params={"feed_url": encoded_feed_url})
response: Response = client.get(url="/embed", params={"feed_url": encoded_feed_url})
assert response.status_code == 200

response: Response = client.get("/feed", params={"feed_url": encoded_feed_url})
response: Response = client.get(url="/feed", params={"feed_url": encoded_feed_url})
assert response.status_code == 200

response: Response = client.get("/")
response: Response = client.get(url="/feed_more", params={"feed_url": encoded_feed_url})
assert response.status_code == 200

response: Response = client.get("/webhooks")
response: Response = client.get(url="/")
assert response.status_code == 200

response: Response = client.get("/whitelist", params={"feed_url": encoded_feed_url})
response: Response = client.get(url="/webhooks")
assert response.status_code == 200

response: Response = client.get(url="/whitelist", params={"feed_url": encoded_feed_url})
assert response.status_code == 200


def test_pause_feed() -> None:
"""Test the /pause_feed page."""
# Remove the feed if it already exists before we run the test.
feeds: Response = client.get("/")
feeds: Response = client.get(url="/")
if feed_url in feeds.text:
client.post("/remove", data={"feed_url": feed_url})
client.post("/remove", data={"feed_url": encoded_feed_url})
client.post(url="/remove", data={"feed_url": feed_url})
client.post(url="/remove", data={"feed_url": encoded_feed_url})

# Add the feed.
response: Response = client.post("/add", data={"feed_url": feed_url, "webhook_dropdown": webhook_name})
response: Response = client.post(url="/add", data={"feed_url": feed_url, "webhook_dropdown": webhook_name})

# Unpause the feed if it is paused.
feeds: Response = client.get("/")
feeds: Response = client.get(url="/")
if "Paused" in feeds.text:
response: Response = client.post("/unpause", data={"feed_url": feed_url})
response: Response = client.post(url="/unpause", data={"feed_url": feed_url})
assert response.status_code == 200

# Pause the feed.
response: Response = client.post("/pause", data={"feed_url": feed_url})
response: Response = client.post(url="/pause", data={"feed_url": feed_url})
assert response.status_code == 200

# Check that the feed was paused.
response = client.get("/")
response = client.get(url="/")
assert response.status_code == 200
assert feed_url in response.text
@ -160,24 +169,24 @@ def test_unpause_feed() -> None:
# Remove the feed if it already exists before we run the test.
feeds: Response = client.get("/")
if feed_url in feeds.text:
client.post("/remove", data={"feed_url": feed_url})
client.post("/remove", data={"feed_url": encoded_feed_url})
client.post(url="/remove", data={"feed_url": feed_url})
client.post(url="/remove", data={"feed_url": encoded_feed_url})

# Add the feed.
response: Response = client.post("/add", data={"feed_url": feed_url, "webhook_dropdown": webhook_name})
response: Response = client.post(url="/add", data={"feed_url": feed_url, "webhook_dropdown": webhook_name})

# Pause the feed if it is unpaused.
feeds: Response = client.get("/")
feeds: Response = client.get(url="/")
if "Paused" not in feeds.text:
response: Response = client.post("/pause", data={"feed_url": feed_url})
response: Response = client.post(url="/pause", data={"feed_url": feed_url})
assert response.status_code == 200

# Unpause the feed.
response: Response = client.post("/unpause", data={"feed_url": feed_url})
response: Response = client.post(url="/unpause", data={"feed_url": feed_url})
assert response.status_code == 200

# Check that the feed was unpaused.
response = client.get("/")
response = client.get(url="/")
assert response.status_code == 200
assert feed_url in response.text

@ -185,20 +194,20 @@ def test_unpause_feed() -> None:
def test_remove_feed() -> None:
"""Test the /remove page."""
# Remove the feed if it already exists before we run the test.
feeds: Response = client.get("/")
feeds: Response = client.get(url="/")
if feed_url in feeds.text:
client.post("/remove", data={"feed_url": feed_url})
client.post("/remove", data={"feed_url": encoded_feed_url})
client.post(url="/remove", data={"feed_url": feed_url})
client.post(url="/remove", data={"feed_url": encoded_feed_url})

# Add the feed.
response: Response = client.post("/add", data={"feed_url": feed_url, "webhook_dropdown": webhook_name})
response: Response = client.post(url="/add", data={"feed_url": feed_url, "webhook_dropdown": webhook_name})

# Remove the feed.
response: Response = client.post("/remove", data={"feed_url": feed_url})
response: Response = client.post(url="/remove", data={"feed_url": feed_url})
assert response.status_code == 200

# Check that the feed was removed.
response = client.get("/")
response = client.get(url="/")
assert response.status_code == 200
assert feed_url not in response.text

@ -206,18 +215,21 @@ def test_remove_feed() -> None:
def test_delete_webhook() -> None:
"""Test the /delete_webhook page."""
# Remove the feed if it already exists before we run the test.
feeds: Response = client.get("/webhooks")
feeds: Response = client.get(url="/webhooks")
if webhook_url in feeds.text:
client.post("/delete_webhook", data={"webhook_url": webhook_url})
client.post(url="/delete_webhook", data={"webhook_url": webhook_url})

# Add the webhook.
response: Response = client.post("/add_webhook", data={"webhook_name": webhook_name, "webhook_url": webhook_url})
response: Response = client.post(
url="/add_webhook",
data={"webhook_name": webhook_name, "webhook_url": webhook_url},
)

# Delete the webhook.
response: Response = client.post("/delete_webhook", data={"webhook_url": webhook_url})
response: Response = client.post(url="/delete_webhook", data={"webhook_url": webhook_url})
assert response.status_code == 200

# Check that the webhook was added.
response = client.get("/webhooks")
response = client.get(url="/webhooks")
assert response.status_code == 200
assert webhook_name not in response.text
@ -57,13 +57,13 @@ def test_convert_to_md() -> None:
'<div class="field field-name-field-short-description field-type-text-long field-label-hidden">'
'<div class="field-items"><div class="field-item even">Plus new options to mirror your camera and take a selfie.</div>' # noqa: E501
'</div></div><div class="field field-name-field-thumbnail-image field-type-image field-label-hidden">'
'<div class="field-items"><div class="field-item even"><a href="https://www.nvidia.com/en-us/geforce/news/jan-2023-nvidia-broadcast-update/">' # noqa: E501
'<img width="210" src="https://www.nvidia.com/content/dam/en-zz/Solutions/geforce/news/jan-2023-nvidia-broadcast-update/broadcast-owned-asset-625x330-newsfeed.png"' # noqa: E501
'<div class="field-items"><div class="field-item even"><a href="https://www.nvidia.com/en-us/geforce/news/jan-2023-nvidia-broadcast-update/">'
'<img width="210" src="https://www.nvidia.com/content/dam/en-zz/Solutions/geforce/news/jan-2023-nvidia-broadcast-update/broadcast-owned-asset-625x330-newsfeed.png"'
' title="NVIDIA Broadcast 1.4 Adds Eye Contact and Vignette Effects With Virtual Background Enhancements" '
'alt="NVIDIA Broadcast 1.4 Adds Eye Contact and Vignette Effects With Virtual Background Enhancements"></a></div></div></div>' # noqa: E501
)
assert (
convert_html_to_md(nvidia_entry)
== "[NVIDIA Broadcast 1.4 Adds Eye Contact and Vignette Effects With Virtual Background Enhancements](https://www.nvidia.com/en-us/geforce/news/jan-2023-nvidia-broadcast-update/)\n" # noqa: E501
== "[NVIDIA Broadcast 1.4 Adds Eye Contact and Vignette Effects With Virtual Background Enhancements](https://www.nvidia.com/en-us/geforce/news/jan-2023-nvidia-broadcast-update/)\n"
"Plus new options to mirror your camera and take a selfie."
)
@ -1,3 +1,5 @@
from __future__ import annotations

import tempfile
from pathlib import Path
from typing import TYPE_CHECKING