Add git backup functionality
Fixes: https://github.com/TheLovinator1/discord-rss-bot/issues/421 Merges: https://github.com/TheLovinator1/discord-rss-bot/pull/422
This commit is contained in:
parent
9378dac0fa
commit
e8bd528def
16 changed files with 1062 additions and 89 deletions
|
|
@ -4,12 +4,15 @@ import urllib.parse
|
|||
from functools import lru_cache
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from discord_rss_bot.filter.blacklist import entry_should_be_skipped, feed_has_blacklist_tags
|
||||
from discord_rss_bot.filter.whitelist import has_white_tags, should_be_sent
|
||||
from discord_rss_bot.filter.blacklist import entry_should_be_skipped
|
||||
from discord_rss_bot.filter.blacklist import feed_has_blacklist_tags
|
||||
from discord_rss_bot.filter.whitelist import has_white_tags
|
||||
from discord_rss_bot.filter.whitelist import should_be_sent
|
||||
from discord_rss_bot.settings import get_reader
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from reader import Entry, Reader
|
||||
from reader import Entry
|
||||
from reader import Reader
|
||||
|
||||
# Our reader
|
||||
reader: Reader = get_reader()
|
||||
|
|
|
|||
|
|
@ -5,9 +5,13 @@ import json
|
|||
import logging
|
||||
from dataclasses import dataclass
|
||||
|
||||
from bs4 import BeautifulSoup, Tag
|
||||
from bs4 import BeautifulSoup
|
||||
from bs4 import Tag
|
||||
from markdownify import markdownify
|
||||
from reader import Entry, Feed, Reader, TagNotFoundError
|
||||
from reader import Entry
|
||||
from reader import Feed
|
||||
from reader import Reader
|
||||
from reader import TagNotFoundError
|
||||
|
||||
from discord_rss_bot.is_url_valid import is_url_valid
|
||||
from discord_rss_bot.settings import get_reader
|
||||
|
|
|
|||
|
|
@ -5,42 +5,41 @@ import logging
|
|||
import os
|
||||
import pprint
|
||||
import re
|
||||
from typing import TYPE_CHECKING, Any
|
||||
from urllib.parse import ParseResult, urlparse
|
||||
from typing import TYPE_CHECKING
|
||||
from typing import Any
|
||||
from urllib.parse import ParseResult
|
||||
from urllib.parse import urlparse
|
||||
|
||||
import tldextract
|
||||
from discord_webhook import DiscordEmbed, DiscordWebhook
|
||||
from discord_webhook import DiscordEmbed
|
||||
from discord_webhook import DiscordWebhook
|
||||
from fastapi import HTTPException
|
||||
from markdownify import markdownify
|
||||
from reader import (
|
||||
Entry,
|
||||
EntryNotFoundError,
|
||||
Feed,
|
||||
FeedExistsError,
|
||||
FeedNotFoundError,
|
||||
Reader,
|
||||
ReaderError,
|
||||
StorageError,
|
||||
TagNotFoundError,
|
||||
)
|
||||
from reader import Entry
|
||||
from reader import EntryNotFoundError
|
||||
from reader import Feed
|
||||
from reader import FeedExistsError
|
||||
from reader import FeedNotFoundError
|
||||
from reader import Reader
|
||||
from reader import ReaderError
|
||||
from reader import StorageError
|
||||
from reader import TagNotFoundError
|
||||
|
||||
from discord_rss_bot.custom_message import (
|
||||
CustomEmbed,
|
||||
get_custom_message,
|
||||
replace_tags_in_embed,
|
||||
replace_tags_in_text_message,
|
||||
)
|
||||
from discord_rss_bot.custom_message import CustomEmbed
|
||||
from discord_rss_bot.custom_message import get_custom_message
|
||||
from discord_rss_bot.custom_message import replace_tags_in_embed
|
||||
from discord_rss_bot.custom_message import replace_tags_in_text_message
|
||||
from discord_rss_bot.filter.blacklist import entry_should_be_skipped
|
||||
from discord_rss_bot.filter.whitelist import has_white_tags, should_be_sent
|
||||
from discord_rss_bot.hoyolab_api import (
|
||||
create_hoyolab_webhook,
|
||||
extract_post_id_from_hoyolab_url,
|
||||
fetch_hoyolab_post,
|
||||
is_c3kay_feed,
|
||||
)
|
||||
from discord_rss_bot.filter.whitelist import has_white_tags
|
||||
from discord_rss_bot.filter.whitelist import should_be_sent
|
||||
from discord_rss_bot.hoyolab_api import create_hoyolab_webhook
|
||||
from discord_rss_bot.hoyolab_api import extract_post_id_from_hoyolab_url
|
||||
from discord_rss_bot.hoyolab_api import fetch_hoyolab_post
|
||||
from discord_rss_bot.hoyolab_api import is_c3kay_feed
|
||||
from discord_rss_bot.is_url_valid import is_url_valid
|
||||
from discord_rss_bot.missing_tags import add_missing_tags
|
||||
from discord_rss_bot.settings import default_custom_message, get_reader
|
||||
from discord_rss_bot.settings import default_custom_message
|
||||
from discord_rss_bot.settings import get_reader
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from collections.abc import Iterable
|
||||
|
|
|
|||
238
discord_rss_bot/git_backup.py
Normal file
238
discord_rss_bot/git_backup.py
Normal file
|
|
@ -0,0 +1,238 @@
|
|||
"""Git backup module for committing bot state changes to a private repository.
|
||||
|
||||
Configure the backup by setting these environment variables:
|
||||
- ``GIT_BACKUP_PATH``: Local filesystem path for the backup git repository.
|
||||
When set, the bot will initialise a git repo there (if one doesn't exist)
|
||||
and commit an export of its state after every relevant change.
|
||||
- ``GIT_BACKUP_REMOTE``: Optional remote URL (e.g. ``git@github.com:you/private-repo.git``).
|
||||
When set, every commit is followed by a ``git push`` to this remote.
|
||||
|
||||
The exported state is written as ``state.json`` inside the backup repo. It
|
||||
contains the list of feeds together with their webhook URL, filter settings
|
||||
(blacklist / whitelist, regex variants), custom messages and embed settings.
|
||||
Global webhooks are also included.
|
||||
|
||||
Example docker-compose snippet::
|
||||
|
||||
environment:
|
||||
- GIT_BACKUP_PATH=/data/backup
|
||||
- GIT_BACKUP_REMOTE=git@github.com:you/private-config.git
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import shutil
|
||||
import subprocess # noqa: S404
|
||||
from pathlib import Path
|
||||
from typing import TYPE_CHECKING
|
||||
from typing import Any
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from reader import Reader
|
||||
|
||||
logger: logging.Logger = logging.getLogger(__name__)
|
||||
GIT_EXECUTABLE: str = shutil.which("git") or "git"
|
||||
|
||||
|
||||
type TAG_VALUE = (
|
||||
dict[str, str | int | float | bool | dict[str, Any] | list[Any] | None]
|
||||
| list[str | int | float | bool | dict[str, Any] | list[Any] | None]
|
||||
| None
|
||||
)
|
||||
"""Type alias for the value of a feed tag, which can be a nested structure of dicts and lists, or None."""
|
||||
|
||||
# Tags that are exported per-feed (empty values are omitted).
|
||||
_FEED_TAGS: tuple[str, ...] = (
|
||||
"webhook",
|
||||
"custom_message",
|
||||
"should_send_embed",
|
||||
"embed",
|
||||
"blacklist_title",
|
||||
"blacklist_summary",
|
||||
"blacklist_content",
|
||||
"blacklist_author",
|
||||
"regex_blacklist_title",
|
||||
"regex_blacklist_summary",
|
||||
"regex_blacklist_content",
|
||||
"regex_blacklist_author",
|
||||
"whitelist_title",
|
||||
"whitelist_summary",
|
||||
"whitelist_content",
|
||||
"whitelist_author",
|
||||
"regex_whitelist_title",
|
||||
"regex_whitelist_summary",
|
||||
"regex_whitelist_content",
|
||||
"regex_whitelist_author",
|
||||
)
|
||||
|
||||
|
||||
def get_backup_path() -> Path | None:
    """Return the configured backup path, or ``None`` when backups are disabled.

    Returns:
        A :class:`~pathlib.Path` built from the ``GIT_BACKUP_PATH`` environment
        variable, or ``None`` if the variable is unset or blank.
    """
    configured: str = os.environ.get("GIT_BACKUP_PATH", "").strip()
    if not configured:
        return None
    return Path(configured)
|
||||
|
||||
|
||||
def get_backup_remote() -> str:
    """Return the remote URL used for pushes, or ``""`` when no remote is set.

    Returns:
        The stripped value of the ``GIT_BACKUP_REMOTE`` environment variable,
        or the empty string when it is unset.
    """
    remote: str = os.environ.get("GIT_BACKUP_REMOTE", "")
    return remote.strip()
|
||||
|
||||
|
||||
def _run_git(backup_path: Path, *args: str, check: bool = True) -> subprocess.CompletedProcess[bytes]:
    """Run a git subcommand inside *backup_path* with captured output.

    Args:
        backup_path: Repository directory, passed to git via ``-C``.
        *args: The git subcommand and its arguments.
        check: When ``True``, raise ``CalledProcessError`` on a non-zero exit.

    Returns:
        The completed process, with stdout/stderr captured as bytes.
    """
    return subprocess.run(  # noqa: S603
        [GIT_EXECUTABLE, "-C", str(backup_path), *args],
        check=check,
        capture_output=True,
    )


def setup_backup_repo(backup_path: Path) -> bool:
    """Ensure the backup directory exists and contains a git repository.

    If the directory does not yet contain a ``.git`` folder a new repository is
    initialised. A basic git identity is configured locally so that commits
    succeed even in environments where a global ``~/.gitconfig`` is absent,
    and the ``origin`` remote is created or updated from ``GIT_BACKUP_REMOTE``.

    Args:
        backup_path: Local path for the backup repository.

    Returns:
        ``True`` if the repository is ready, ``False`` on any error.
    """
    try:
        backup_path.mkdir(parents=True, exist_ok=True)
        if not (backup_path / ".git").exists():
            # `git init` takes the target directory positionally, so -C is not needed here.
            subprocess.run([GIT_EXECUTABLE, "init", str(backup_path)], check=True, capture_output=True)  # noqa: S603
            logger.info("Initialised git backup repository at %s", backup_path)

        # Ensure a local identity exists so that `git commit` always works.
        for key, value in (("user.email", "discord-rss-bot@localhost"), ("user.name", "discord-rss-bot")):
            # `git config --local <key>` exits non-zero when the key is unset.
            if _run_git(backup_path, "config", "--local", key, check=False).returncode != 0:
                _run_git(backup_path, "config", "--local", key, value)

        # Configure the remote if GIT_BACKUP_REMOTE is set.
        remote_url: str = get_backup_remote()
        if remote_url:
            # `remote get-url` exits non-zero when "origin" does not exist yet.
            check_remote: subprocess.CompletedProcess[bytes] = _run_git(
                backup_path, "remote", "get-url", "origin", check=False
            )
            if check_remote.returncode != 0:
                # Remote doesn't exist, add it.
                _run_git(backup_path, "remote", "add", "origin", remote_url)
                logger.info("Added remote 'origin' with URL: %s", remote_url)
            else:
                # Remote exists, update it only if the URL has changed.
                current_url: str = check_remote.stdout.decode().strip()
                if current_url != remote_url:
                    _run_git(backup_path, "remote", "set-url", "origin", remote_url)
                    logger.info("Updated remote 'origin' URL from %s to %s", current_url, remote_url)
    except Exception:
        logger.exception("Failed to set up git backup repository at %s", backup_path)
        return False
    return True
|
||||
|
||||
|
||||
def export_state(reader: Reader, backup_path: Path) -> None:
    """Serialise the current bot state to ``state.json`` inside *backup_path*.

    Args:
        reader: The :class:`reader.Reader` instance to read state from.
        backup_path: Destination directory for the exported ``state.json``.
    """
    feeds_state: list[dict] = []
    for feed in reader.get_feeds():
        exported: dict = {"url": feed.url}
        for tag_name in _FEED_TAGS:
            try:
                tag_value: TAG_VALUE = reader.get_tag(feed, tag_name, None)
            except Exception:
                logger.exception("Failed to read tag '%s' for feed '%s' during state export", tag_name, feed.url)
                continue
            # Unset and empty values are omitted to keep the export compact.
            if tag_value is not None and tag_value != "":  # noqa: PLC1901
                exported[tag_name] = tag_value
        feeds_state.append(exported)

    try:
        global_webhooks: list[str | int | float | bool | dict[str, Any] | list[Any] | None] = list(
            reader.get_tag((), "webhooks", [])
        )
    except Exception:  # noqa: BLE001
        global_webhooks = []

    payload: dict = {"feeds": feeds_state, "webhooks": global_webhooks}
    # default=str is deliberate: any non-JSON-native tag value is stringified.
    (backup_path / "state.json").write_text(json.dumps(payload, indent=2, default=str), encoding="utf-8")
|
||||
|
||||
|
||||
def commit_state_change(reader: Reader, message: str) -> None:
    """Export current state and commit it to the backup repository.

    This is a no-op when ``GIT_BACKUP_PATH`` is not configured. Errors are
    logged but never raised so that a backup failure never interrupts normal
    bot operation.

    Args:
        reader: The :class:`reader.Reader` instance to read state from.
        message: Commit message describing the change (e.g. ``"Add feed example.com/rss.xml"``).
    """
    backup_path: Path | None = get_backup_path()
    if backup_path is None or not setup_backup_repo(backup_path):
        return

    try:
        export_state(reader, backup_path)

        subprocess.run([GIT_EXECUTABLE, "-C", str(backup_path), "add", "-A"], check=True, capture_output=True)  # noqa: S603

        # `git diff --cached --exit-code` exits 0 when nothing is staged, so
        # we skip empty commits.
        staged_diff: subprocess.CompletedProcess[bytes] = subprocess.run(  # noqa: S603
            [GIT_EXECUTABLE, "-C", str(backup_path), "diff", "--cached", "--exit-code"],
            check=False,
            capture_output=True,
        )
        if staged_diff.returncode == 0:
            logger.debug("No state changes to commit for: %s", message)
            return

        subprocess.run(  # noqa: S603
            [GIT_EXECUTABLE, "-C", str(backup_path), "commit", "-m", message],
            check=True,
            capture_output=True,
        )
        logger.info("Committed state change to backup repo: %s", message)

        # Push to remote if configured.
        if get_backup_remote():
            subprocess.run(  # noqa: S603
                [GIT_EXECUTABLE, "-C", str(backup_path), "push", "origin", "HEAD"],
                check=True,
                capture_output=True,
            )
            logger.info("Pushed state change to remote 'origin': %s", message)
    except Exception:
        logger.exception("Failed to commit state change '%s' to backup repo", message)
|
||||
|
|
@ -1,6 +1,7 @@
|
|||
from __future__ import annotations
|
||||
|
||||
from urllib.parse import ParseResult, urlparse
|
||||
from urllib.parse import ParseResult
|
||||
from urllib.parse import urlparse
|
||||
|
||||
|
||||
def is_url_valid(url: str) -> bool:
|
||||
|
|
|
|||
|
|
@ -7,48 +7,62 @@ import typing
|
|||
import urllib.parse
|
||||
from contextlib import asynccontextmanager
|
||||
from dataclasses import dataclass
|
||||
from datetime import UTC, datetime
|
||||
from datetime import UTC
|
||||
from datetime import datetime
|
||||
from functools import lru_cache
|
||||
from typing import TYPE_CHECKING, Annotated, cast
|
||||
from typing import TYPE_CHECKING
|
||||
from typing import Annotated
|
||||
from typing import Any
|
||||
from typing import cast
|
||||
|
||||
import httpx
|
||||
import sentry_sdk
|
||||
import uvicorn
|
||||
from apscheduler.schedulers.asyncio import AsyncIOScheduler
|
||||
from fastapi import FastAPI, Form, HTTPException, Request
|
||||
from fastapi import FastAPI
|
||||
from fastapi import Form
|
||||
from fastapi import HTTPException
|
||||
from fastapi import Request
|
||||
from fastapi.responses import HTMLResponse
|
||||
from fastapi.staticfiles import StaticFiles
|
||||
from fastapi.templating import Jinja2Templates
|
||||
from httpx import Response
|
||||
from markdownify import markdownify
|
||||
from reader import Entry, EntryNotFoundError, Feed, FeedNotFoundError, Reader, TagNotFoundError
|
||||
from reader import Entry
|
||||
from reader import EntryNotFoundError
|
||||
from reader import Feed
|
||||
from reader import FeedNotFoundError
|
||||
from reader import Reader
|
||||
from reader import TagNotFoundError
|
||||
from starlette.responses import RedirectResponse
|
||||
|
||||
from discord_rss_bot import settings
|
||||
from discord_rss_bot.custom_filters import (
|
||||
entry_is_blacklisted,
|
||||
entry_is_whitelisted,
|
||||
)
|
||||
from discord_rss_bot.custom_message import (
|
||||
CustomEmbed,
|
||||
get_custom_message,
|
||||
get_embed,
|
||||
get_first_image,
|
||||
replace_tags_in_text_message,
|
||||
save_embed,
|
||||
)
|
||||
from discord_rss_bot.feeds import create_feed, extract_domain, send_entry_to_discord, send_to_discord
|
||||
from discord_rss_bot.custom_filters import entry_is_blacklisted
|
||||
from discord_rss_bot.custom_filters import entry_is_whitelisted
|
||||
from discord_rss_bot.custom_message import CustomEmbed
|
||||
from discord_rss_bot.custom_message import get_custom_message
|
||||
from discord_rss_bot.custom_message import get_embed
|
||||
from discord_rss_bot.custom_message import get_first_image
|
||||
from discord_rss_bot.custom_message import replace_tags_in_text_message
|
||||
from discord_rss_bot.custom_message import save_embed
|
||||
from discord_rss_bot.feeds import create_feed
|
||||
from discord_rss_bot.feeds import extract_domain
|
||||
from discord_rss_bot.feeds import send_entry_to_discord
|
||||
from discord_rss_bot.feeds import send_to_discord
|
||||
from discord_rss_bot.git_backup import commit_state_change
|
||||
from discord_rss_bot.git_backup import get_backup_path
|
||||
from discord_rss_bot.missing_tags import add_missing_tags
|
||||
from discord_rss_bot.search import create_search_context
|
||||
from discord_rss_bot.settings import get_reader
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from collections.abc import AsyncGenerator, Iterable
|
||||
from collections.abc import AsyncGenerator
|
||||
from collections.abc import Iterable
|
||||
|
||||
from reader.types import JSONType
|
||||
|
||||
|
||||
LOGGING_CONFIG = {
|
||||
LOGGING_CONFIG: dict[str, Any] = {
|
||||
"version": 1,
|
||||
"disable_existing_loggers": True,
|
||||
"formatters": {
|
||||
|
|
@ -130,11 +144,11 @@ async def post_add_webhook(
|
|||
webhook_name: The name of the webhook.
|
||||
webhook_url: The url of the webhook.
|
||||
|
||||
Raises:
|
||||
HTTPException: If the webhook already exists.
|
||||
|
||||
Returns:
|
||||
RedirectResponse: Redirect to the index page.
|
||||
|
||||
Raises:
|
||||
HTTPException: If the webhook already exists.
|
||||
"""
|
||||
# Get current webhooks from the database if they exist otherwise use an empty list.
|
||||
webhooks = list(reader.get_tag((), "webhooks", []))
|
||||
|
|
@ -151,6 +165,8 @@ async def post_add_webhook(
|
|||
|
||||
reader.set_tag((), "webhooks", webhooks) # pyright: ignore[reportArgumentType]
|
||||
|
||||
commit_state_change(reader, f"Add webhook {webhook_name.strip()}")
|
||||
|
||||
return RedirectResponse(url="/", status_code=303)
|
||||
|
||||
# TODO(TheLovinator): Show this error on the page.
|
||||
|
|
@ -165,11 +181,12 @@ async def post_delete_webhook(webhook_url: Annotated[str, Form()]) -> RedirectRe
|
|||
Args:
|
||||
webhook_url: The url of the webhook.
|
||||
|
||||
Returns:
|
||||
RedirectResponse: Redirect to the index page.
|
||||
|
||||
Raises:
|
||||
HTTPException: If the webhook could not be deleted
|
||||
|
||||
Returns:
|
||||
RedirectResponse: Redirect to the index page.
|
||||
"""
|
||||
# TODO(TheLovinator): Check if the webhook is in use by any feeds before deleting it.
|
||||
# TODO(TheLovinator): Replace HTTPException with a custom exception for both of these.
|
||||
|
|
@ -196,6 +213,8 @@ async def post_delete_webhook(webhook_url: Annotated[str, Form()]) -> RedirectRe
|
|||
# Add our new list of webhooks to the database.
|
||||
reader.set_tag((), "webhooks", webhooks) # pyright: ignore[reportArgumentType]
|
||||
|
||||
commit_state_change(reader, f"Delete webhook {webhook_url.strip()}")
|
||||
|
||||
return RedirectResponse(url="/", status_code=303)
|
||||
|
||||
|
||||
|
|
@ -215,6 +234,7 @@ async def post_create_feed(
|
|||
"""
|
||||
clean_feed_url: str = feed_url.strip()
|
||||
create_feed(reader, feed_url, webhook_dropdown)
|
||||
commit_state_change(reader, f"Add feed {clean_feed_url}")
|
||||
return RedirectResponse(url=f"/feed?feed_url={urllib.parse.quote(clean_feed_url)}", status_code=303)
|
||||
|
||||
|
||||
|
|
@ -286,6 +306,8 @@ async def post_set_whitelist(
|
|||
reader.set_tag(clean_feed_url, "regex_whitelist_content", regex_whitelist_content) # pyright: ignore[reportArgumentType][call-overload]
|
||||
reader.set_tag(clean_feed_url, "regex_whitelist_author", regex_whitelist_author) # pyright: ignore[reportArgumentType][call-overload]
|
||||
|
||||
commit_state_change(reader, f"Update whitelist for {clean_feed_url}")
|
||||
|
||||
return RedirectResponse(url=f"/feed?feed_url={urllib.parse.quote(clean_feed_url)}", status_code=303)
|
||||
|
||||
|
||||
|
|
@ -367,6 +389,7 @@ async def post_set_blacklist(
|
|||
reader.set_tag(clean_feed_url, "regex_blacklist_summary", regex_blacklist_summary) # pyright: ignore[reportArgumentType][call-overload]
|
||||
reader.set_tag(clean_feed_url, "regex_blacklist_content", regex_blacklist_content) # pyright: ignore[reportArgumentType][call-overload]
|
||||
reader.set_tag(clean_feed_url, "regex_blacklist_author", regex_blacklist_author) # pyright: ignore[reportArgumentType][call-overload]
|
||||
commit_state_change(reader, f"Update blacklist for {clean_feed_url}")
|
||||
return RedirectResponse(url=f"/feed?feed_url={urllib.parse.quote(clean_feed_url)}", status_code=303)
|
||||
|
||||
|
||||
|
|
@ -433,6 +456,7 @@ async def post_set_custom(
|
|||
reader.set_tag(feed_url, "custom_message", default_custom_message)
|
||||
|
||||
clean_feed_url: str = feed_url.strip()
|
||||
commit_state_change(reader, f"Update custom message for {clean_feed_url}")
|
||||
return RedirectResponse(url=f"/feed?feed_url={urllib.parse.quote(clean_feed_url)}", status_code=303)
|
||||
|
||||
|
||||
|
|
@ -552,6 +576,7 @@ async def post_embed(
|
|||
# Save the data.
|
||||
save_embed(reader, feed, custom_embed)
|
||||
|
||||
commit_state_change(reader, f"Update embed settings for {clean_feed_url}")
|
||||
return RedirectResponse(url=f"/feed?feed_url={urllib.parse.quote(clean_feed_url)}", status_code=303)
|
||||
|
||||
|
||||
|
|
@ -567,6 +592,7 @@ async def post_use_embed(feed_url: Annotated[str, Form()]) -> RedirectResponse:
|
|||
"""
|
||||
clean_feed_url: str = feed_url.strip()
|
||||
reader.set_tag(clean_feed_url, "should_send_embed", True) # pyright: ignore[reportArgumentType]
|
||||
commit_state_change(reader, f"Enable embed mode for {clean_feed_url}")
|
||||
return RedirectResponse(url=f"/feed?feed_url={urllib.parse.quote(clean_feed_url)}", status_code=303)
|
||||
|
||||
|
||||
|
|
@ -582,6 +608,7 @@ async def post_use_text(feed_url: Annotated[str, Form()]) -> RedirectResponse:
|
|||
"""
|
||||
clean_feed_url: str = feed_url.strip()
|
||||
reader.set_tag(clean_feed_url, "should_send_embed", False) # pyright: ignore[reportArgumentType]
|
||||
commit_state_change(reader, f"Disable embed mode for {clean_feed_url}")
|
||||
return RedirectResponse(url=f"/feed?feed_url={urllib.parse.quote(clean_feed_url)}", status_code=303)
|
||||
|
||||
|
||||
|
|
@ -611,11 +638,11 @@ async def get_feed(feed_url: str, request: Request, starting_after: str = ""):
|
|||
request: The request object.
|
||||
starting_after: The entry to start after. Used for pagination.
|
||||
|
||||
Raises:
|
||||
HTTPException: If the feed is not found.
|
||||
|
||||
Returns:
|
||||
HTMLResponse: The feed page.
|
||||
|
||||
Raises:
|
||||
HTTPException: If the feed is not found.
|
||||
"""
|
||||
entries_per_page: int = 20
|
||||
|
||||
|
|
@ -845,23 +872,25 @@ async def get_webhooks(request: Request):
|
|||
|
||||
|
||||
@app.get("/", response_class=HTMLResponse)
|
||||
def get_index(request: Request):
|
||||
def get_index(request: Request, message: str = ""):
|
||||
"""This is the root of the website.
|
||||
|
||||
Args:
|
||||
request: The request object.
|
||||
message: Optional message to display to the user.
|
||||
|
||||
Returns:
|
||||
HTMLResponse: The index page.
|
||||
"""
|
||||
return templates.TemplateResponse(request=request, name="index.html", context=make_context_index(request))
|
||||
return templates.TemplateResponse(request=request, name="index.html", context=make_context_index(request, message))
|
||||
|
||||
|
||||
def make_context_index(request: Request):
|
||||
def make_context_index(request: Request, message: str = ""):
|
||||
"""Create the needed context for the index page.
|
||||
|
||||
Args:
|
||||
request: The request object.
|
||||
message: Optional message to display to the user.
|
||||
|
||||
Returns:
|
||||
dict: The context for the index page.
|
||||
|
|
@ -894,6 +923,7 @@ def make_context_index(request: Request):
|
|||
"webhooks": hooks,
|
||||
"broken_feeds": broken_feeds,
|
||||
"feeds_without_attached_webhook": feeds_without_attached_webhook,
|
||||
"messages": message or None,
|
||||
}
|
||||
|
||||
|
||||
|
|
@ -904,17 +934,20 @@ async def remove_feed(feed_url: Annotated[str, Form()]):
|
|||
Args:
|
||||
feed_url: The feed to add.
|
||||
|
||||
Raises:
|
||||
HTTPException: Feed not found
|
||||
|
||||
Returns:
|
||||
RedirectResponse: Redirect to the index page.
|
||||
|
||||
Raises:
|
||||
HTTPException: Feed not found
|
||||
"""
|
||||
try:
|
||||
reader.delete_feed(urllib.parse.unquote(feed_url))
|
||||
except FeedNotFoundError as e:
|
||||
raise HTTPException(status_code=404, detail="Feed not found") from e
|
||||
|
||||
commit_state_change(reader, f"Remove feed {urllib.parse.unquote(feed_url)}")
|
||||
|
||||
return RedirectResponse(url="/", status_code=303)
|
||||
|
||||
|
||||
|
|
@ -926,11 +959,12 @@ async def update_feed(request: Request, feed_url: str):
|
|||
request: The request object.
|
||||
feed_url: The feed URL to update.
|
||||
|
||||
Raises:
|
||||
HTTPException: If the feed is not found.
|
||||
|
||||
Returns:
|
||||
RedirectResponse: Redirect to the feed page.
|
||||
|
||||
Raises:
|
||||
HTTPException: If the feed is not found.
|
||||
"""
|
||||
try:
|
||||
reader.update_feed(urllib.parse.unquote(feed_url))
|
||||
|
|
@ -941,6 +975,33 @@ async def update_feed(request: Request, feed_url: str):
|
|||
return RedirectResponse(url="/feed?feed_url=" + urllib.parse.quote(feed_url), status_code=303)
|
||||
|
||||
|
||||
@app.post("/backup")
async def manual_backup(request: Request) -> RedirectResponse:
    """Manually trigger a git backup of the current state.

    Args:
        request: The request object.

    Returns:
        RedirectResponse: Redirect to the index page with a success or error message.
    """
    backup_path = get_backup_path()
    if backup_path is None:
        logger.warning("Manual git backup attempted but GIT_BACKUP_PATH is not configured")
        message = "Git backup is not configured. Set GIT_BACKUP_PATH environment variable to enable backups."
    else:
        try:
            commit_state_change(reader, "Manual backup triggered from web UI")
        except Exception as e:
            # NOTE(review): commit_state_change is documented to swallow its own
            # errors, so this branch is likely unreachable and backup failures may
            # still be reported as success — confirm against git_backup.commit_state_change.
            logger.exception("Manual git backup failed")
            message = f"Failed to create git backup: {e}"
        else:
            logger.info("Manual git backup completed successfully")
            message = "Successfully created git backup!"

    return RedirectResponse(url=f"/?message={urllib.parse.quote(message)}", status_code=303)
|
||||
|
||||
|
||||
@app.get("/search", response_class=HTMLResponse)
|
||||
async def search(request: Request, query: str):
|
||||
"""Get entries matching a full-text search query.
|
||||
|
|
@ -988,11 +1049,12 @@ def modify_webhook(old_hook: Annotated[str, Form()], new_hook: Annotated[str, Fo
|
|||
old_hook: The webhook to modify.
|
||||
new_hook: The new webhook.
|
||||
|
||||
Returns:
|
||||
RedirectResponse: Redirect to the webhook page.
|
||||
|
||||
Raises:
|
||||
HTTPException: Webhook could not be modified.
|
||||
|
||||
Returns:
|
||||
RedirectResponse: Redirect to the webhook page.
|
||||
"""
|
||||
# Get current webhooks from the database if they exist otherwise use an empty list.
|
||||
webhooks = list(reader.get_tag((), "webhooks", []))
|
||||
|
|
@ -1042,11 +1104,11 @@ def extract_youtube_video_id(url: str) -> str | None:
|
|||
|
||||
# Handle standard YouTube URLs (youtube.com/watch?v=VIDEO_ID)
|
||||
if "youtube.com/watch" in url and "v=" in url:
|
||||
return url.split("v=")[1].split("&")[0]
|
||||
return url.split("v=")[1].split("&", maxsplit=1)[0]
|
||||
|
||||
# Handle shortened YouTube URLs (youtu.be/VIDEO_ID)
|
||||
if "youtu.be/" in url:
|
||||
return url.split("youtu.be/")[1].split("?")[0]
|
||||
return url.split("youtu.be/")[1].split("?", maxsplit=1)[0]
|
||||
|
||||
return None
|
||||
|
||||
|
|
|
|||
|
|
@ -1,8 +1,11 @@
|
|||
from __future__ import annotations
|
||||
|
||||
from reader import Feed, Reader, TagNotFoundError
|
||||
from reader import Feed
|
||||
from reader import Reader
|
||||
from reader import TagNotFoundError
|
||||
|
||||
from discord_rss_bot.settings import default_custom_embed, default_custom_message
|
||||
from discord_rss_bot.settings import default_custom_embed
|
||||
from discord_rss_bot.settings import default_custom_message
|
||||
|
||||
|
||||
def add_custom_message(reader: Reader, feed: Feed) -> None:
|
||||
|
|
|
|||
|
|
@ -8,7 +8,10 @@ from discord_rss_bot.settings import get_reader
|
|||
if TYPE_CHECKING:
|
||||
from collections.abc import Iterable
|
||||
|
||||
from reader import EntrySearchResult, Feed, HighlightedString, Reader
|
||||
from reader import EntrySearchResult
|
||||
from reader import Feed
|
||||
from reader import HighlightedString
|
||||
from reader import Reader
|
||||
|
||||
|
||||
def create_search_context(query: str, custom_reader: Reader | None = None) -> dict:
|
||||
|
|
|
|||
|
|
@ -5,7 +5,8 @@ from functools import lru_cache
|
|||
from pathlib import Path
|
||||
|
||||
from platformdirs import user_data_dir
|
||||
from reader import Reader, make_reader
|
||||
from reader import Reader
|
||||
from reader import make_reader
|
||||
|
||||
if typing.TYPE_CHECKING:
|
||||
from reader.types import JSONType
|
||||
|
|
|
|||
|
|
@ -1,6 +1,9 @@
|
|||
<nav class="navbar navbar-expand-md navbar-dark p-2 mb-3 border-bottom border-warning">
|
||||
<div class="container-fluid">
|
||||
<button class="navbar-toggler ms-auto" type="button" data-bs-toggle="collapse" data-bs-target="#collapseNavbar">
|
||||
<button class="navbar-toggler ms-auto"
|
||||
type="button"
|
||||
data-bs-toggle="collapse"
|
||||
data-bs-target="#collapseNavbar">
|
||||
<span class="navbar-toggler-icon"></span>
|
||||
</button>
|
||||
<div class="navbar-collapse collapse" id="collapseNavbar">
|
||||
|
|
@ -16,11 +19,23 @@
|
|||
<li class="nav-item">
|
||||
<a class="nav-link" href="/webhooks">Webhooks</a>
|
||||
</li>
|
||||
<li class="nav-item nav-link d-none d-md-block">|</li>
|
||||
<li class="nav-item">
|
||||
<form action="/backup" method="post" class="d-inline">
|
||||
<button type="submit"
|
||||
class="nav-link btn btn-link text-decoration-none"
|
||||
onclick="return confirm('Create a manual git backup of the current state?');">
|
||||
Backup
|
||||
</button>
|
||||
</form>
|
||||
</li>
|
||||
</ul>
|
||||
{# Search #}
|
||||
<form action="/search" method="get" class="ms-auto w-50 input-group">
|
||||
<input name="query" class="form-control bg-dark border-dark text-muted" type="search"
|
||||
placeholder="Search" />
|
||||
<input name="query"
|
||||
class="form-control bg-dark border-dark text-muted"
|
||||
type="search"
|
||||
placeholder="Search" />
|
||||
</form>
|
||||
{# Donate button #}
|
||||
<ul class="navbar-nav ms-auto">
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue