This commit is contained in:
parent
5ff8a87680
commit
ce22fb68f3
50 changed files with 539 additions and 1439 deletions
11
.env.example
11
.env.example
|
|
@ -1,11 +0,0 @@
|
|||
DEBUG=True
|
||||
SECRET_KEY=
|
||||
DB_NAME=feedvault
|
||||
DB_USER=feedvault
|
||||
DB_PASSWORD=
|
||||
DB_HOST=192.168.1.2
|
||||
DB_PORT=5433
|
||||
REDIS_PASSWORD=
|
||||
REDIS_PORT=6380
|
||||
REDIS_HOST=192.168.1.2
|
||||
REDIS_PASSWORD=
|
||||
11
.github/SECURITY.md
vendored
11
.github/SECURITY.md
vendored
|
|
@ -1,11 +0,0 @@
|
|||
# Reporting a Vulnerability
|
||||
|
||||
tl;dr: [open a draft security advisory](https://github.com/TheLovinator1/FeedVault/security/advisories/new).
|
||||
|
||||
---
|
||||
|
||||
You can also email me at [hello@feedvault.se](mailto:hello@feedvault.se).
|
||||
|
||||
I am also available on Discord at `TheLovinator#9276`.
|
||||
|
||||
Thanks :-)
|
||||
27
.github/workflows/deploy.yml
vendored
Normal file
27
.github/workflows/deploy.yml
vendored
Normal file
|
|
@ -0,0 +1,27 @@
|
|||
name: Deploy to Server
|
||||
|
||||
on:
|
||||
push:
|
||||
pull_request:
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
deploy:
|
||||
runs-on: self-hosted
|
||||
|
||||
env:
|
||||
DJANGO_DEBUG: False
|
||||
DJANGO_SECRET_KEY: 1234567890
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v6
|
||||
- run: uv sync --all-extras --dev -U
|
||||
- run: uv run pytest
|
||||
- name: Check if makemigrations are needed
|
||||
run: |
|
||||
if uv run python manage.py makemigrations --check --dry-run; then
|
||||
echo "No migrations needed."
|
||||
else
|
||||
echo "Migrations needed. Please create and commit migrations before deploying."
|
||||
exit 1
|
||||
fi
|
||||
52
.github/workflows/docker-publish.yml
vendored
52
.github/workflows/docker-publish.yml
vendored
|
|
@ -1,52 +0,0 @@
|
|||
name: Test and Build Docker Image
|
||||
|
||||
on:
|
||||
push:
|
||||
pull_request:
|
||||
workflow_dispatch:
|
||||
schedule:
|
||||
- cron: "0 0 * * *"
|
||||
jobs:
|
||||
test:
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
SECRET_KEY: 1234567890
|
||||
DEBUG: True
|
||||
ADMIN_EMAIL: 4153203+TheLovinator1@users.noreply.github.com
|
||||
EMAIL_HOST_USER: ${{ secrets.EMAIL_HOST_USER }}
|
||||
EMAIL_HOST_PASSWORD: ${{ secrets.EMAIL_HOST_PASSWORD }}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: 3.13
|
||||
- run: pipx install uv
|
||||
- run: uv venv
|
||||
- run: source .venv/bin/activate
|
||||
- run: pip install -r requirements.txt
|
||||
- run: pip install -r requirements-dev.txt
|
||||
- run: pytest
|
||||
build:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
packages: write
|
||||
if: github.event_name != 'pull_request'
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
needs: test
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: docker/setup-buildx-action@v3
|
||||
- uses: docker/login-action@v4
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.repository_owner }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
- uses: docker/build-push-action@v5
|
||||
with:
|
||||
context: .
|
||||
push: ${{ github.event_name != 'pull_request' }}
|
||||
tags: |
|
||||
ghcr.io/thelovinator1/feedvault:latest
|
||||
14
.github/workflows/ruff.yml
vendored
14
.github/workflows/ruff.yml
vendored
|
|
@ -1,14 +0,0 @@
|
|||
name: Ruff
|
||||
on:
|
||||
push:
|
||||
pull_request:
|
||||
schedule:
|
||||
- cron: "0 0 * * *" # Run every day
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
ruff:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: chartboost/ruff-action@v1
|
||||
82
.gitignore
vendored
82
.gitignore
vendored
|
|
@ -1,6 +1,6 @@
|
|||
# Byte-compiled / optimized / DLL files
|
||||
__pycache__/
|
||||
*.py[cod]
|
||||
*.py[codz]
|
||||
*$py.class
|
||||
|
||||
# C extensions
|
||||
|
|
@ -46,7 +46,7 @@ htmlcov/
|
|||
nosetests.xml
|
||||
coverage.xml
|
||||
*.cover
|
||||
*.py,cover
|
||||
*.py.cover
|
||||
.hypothesis/
|
||||
.pytest_cache/
|
||||
cover/
|
||||
|
|
@ -85,29 +85,44 @@ ipython_config.py
|
|||
# pyenv
|
||||
# For a library or package, you might want to ignore these files since the code is
|
||||
# intended to run in multiple environments; otherwise, check them in:
|
||||
# .python-version
|
||||
.python-version
|
||||
|
||||
# pipenv
|
||||
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
|
||||
# However, in case of collaboration, if having platform-specific dependencies or dependencies
|
||||
# having no cross-platform support, pipenv may install dependencies that don't work, or not
|
||||
# install all needed dependencies.
|
||||
#Pipfile.lock
|
||||
Pipfile.lock
|
||||
|
||||
# UV
|
||||
# Similar to Pipfile.lock, it is generally recommended to include uv.lock in version control.
|
||||
# This is especially recommended for binary packages to ensure reproducibility, and is more
|
||||
# commonly ignored for libraries.
|
||||
uv.lock
|
||||
|
||||
# poetry
|
||||
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
|
||||
# This is especially recommended for binary packages to ensure reproducibility, and is more
|
||||
# commonly ignored for libraries.
|
||||
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
|
||||
#poetry.lock
|
||||
poetry.lock
|
||||
poetry.toml
|
||||
|
||||
# pdm
|
||||
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
|
||||
# pdm recommends including project-wide configuration in pdm.toml, but excluding .pdm-python.
|
||||
# https://pdm-project.org/en/latest/usage/project/#working-with-version-control
|
||||
#pdm.lock
|
||||
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
|
||||
# in version control.
|
||||
# https://pdm.fming.dev/#use-with-ide
|
||||
.pdm.toml
|
||||
#pdm.toml
|
||||
.pdm-python
|
||||
.pdm-build/
|
||||
|
||||
# pixi
|
||||
# Similar to Pipfile.lock, it is generally recommended to include pixi.lock in version control.
|
||||
#pixi.lock
|
||||
# Pixi creates a virtual environment in the .pixi directory, just like venv module creates one
|
||||
# in the .venv directory. It is recommended not to include this directory in version control.
|
||||
.pixi
|
||||
|
||||
# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
|
||||
__pypackages__/
|
||||
|
|
@ -121,6 +136,7 @@ celerybeat.pid
|
|||
|
||||
# Environments
|
||||
.env
|
||||
.envrc
|
||||
.venv
|
||||
env/
|
||||
venv/
|
||||
|
|
@ -153,16 +169,42 @@ dmypy.json
|
|||
cython_debug/
|
||||
|
||||
# PyCharm
|
||||
.idea/
|
||||
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
|
||||
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
|
||||
# and can be added to the global gitignore or merged into this file. For a more nuclear
|
||||
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
|
||||
#.idea/
|
||||
|
||||
# FeedVault directories
|
||||
data/
|
||||
media/
|
||||
# Abstra
|
||||
# Abstra is an AI-powered process automation framework.
|
||||
# Ignore directories containing user credentials, local state, and settings.
|
||||
# Learn more at https://abstra.io/docs
|
||||
.abstra/
|
||||
|
||||
# Visual Studio Code
|
||||
# Visual Studio Code specific template is maintained in a separate VisualStudioCode.gitignore
|
||||
# that can be found at https://github.com/github/gitignore/blob/main/Global/VisualStudioCode.gitignore
|
||||
# and can be added to the global gitignore or merged into this file. However, if you prefer,
|
||||
# you could uncomment the following to ignore the entire vscode folder
|
||||
# .vscode/
|
||||
|
||||
# Ruff stuff:
|
||||
.ruff_cache/
|
||||
|
||||
# PyPI configuration file
|
||||
.pypirc
|
||||
|
||||
# Cursor
|
||||
# Cursor is an AI-powered code editor. `.cursorignore` specifies files/directories to
|
||||
# exclude from AI features like autocomplete and code analysis. Recommended for sensitive data
|
||||
# refer to https://docs.cursor.com/context/ignore-files
|
||||
.cursorignore
|
||||
.cursorindexingignore
|
||||
|
||||
# Marimo
|
||||
marimo/_static/
|
||||
marimo/_lsp/
|
||||
__marimo__/
|
||||
|
||||
# Directories tied to our project
|
||||
staticfiles/
|
||||
|
||||
# https://github.com/lemon24/reader
|
||||
*.sqlite.search
|
||||
*.sqlite
|
||||
|
||||
# When running the cli.py script, the following files are created
|
||||
broken_feeds.csv
|
||||
|
|
|
|||
3
.gitmodules
vendored
3
.gitmodules
vendored
|
|
@ -1,3 +0,0 @@
|
|||
[submodule "RSS-Link-Database"]
|
||||
path = RSS-Link-Database
|
||||
url = https://github.com/rumca-js/RSS-Link-Database
|
||||
|
|
@ -1,52 +1,45 @@
|
|||
default_language_version:
|
||||
python: python3.12
|
||||
repos:
|
||||
# Automatically add trailing commas to calls and literals.
|
||||
- repo: https://github.com/asottile/add-trailing-comma
|
||||
rev: v3.1.0
|
||||
rev: v4.0.0
|
||||
hooks:
|
||||
- id: add-trailing-comma
|
||||
|
||||
# Some out-of-the-box hooks for pre-commit.
|
||||
- repo: https://github.com/pre-commit/pre-commit-hooks
|
||||
rev: v4.6.0
|
||||
rev: v6.0.0
|
||||
hooks:
|
||||
- id: check-added-large-files
|
||||
- id: check-ast
|
||||
- id: check-builtin-literals
|
||||
- id: check-case-conflict
|
||||
- id: check-docstring-first
|
||||
- id: check-executables-have-shebangs
|
||||
- id: check-merge-conflict
|
||||
- id: check-shebang-scripts-are-executable
|
||||
- id: check-symlinks
|
||||
- id: check-toml
|
||||
- id: check-vcs-permalinks
|
||||
- id: check-xml
|
||||
- id: check-yaml
|
||||
- id: debug-statements
|
||||
- id: end-of-file-fixer
|
||||
- id: mixed-line-ending
|
||||
- id: name-tests-test
|
||||
args: [--pytest-test-first]
|
||||
- id: trailing-whitespace
|
||||
|
||||
# Run Pyupgrade on all Python files. This will upgrade the code to Python 3.12.
|
||||
- repo: https://github.com/adamchainz/django-upgrade
|
||||
rev: 1.30.0
|
||||
hooks:
|
||||
- id: django-upgrade
|
||||
args: [--target-version, "6.0"]
|
||||
|
||||
- repo: https://github.com/astral-sh/ruff-pre-commit
|
||||
rev: v0.15.5
|
||||
hooks:
|
||||
- id: ruff-check
|
||||
args: ["--fix", "--exit-non-zero-on-fix"]
|
||||
- id: ruff-format
|
||||
|
||||
- repo: https://github.com/asottile/pyupgrade
|
||||
rev: v3.15.2
|
||||
rev: v3.21.2
|
||||
hooks:
|
||||
- id: pyupgrade
|
||||
args: ["--py312-plus"]
|
||||
args: ["--py311-plus"]
|
||||
|
||||
# An extremely fast Python linter and formatter.
|
||||
- repo: https://github.com/astral-sh/ruff-pre-commit
|
||||
rev: v0.4.4
|
||||
hooks:
|
||||
- id: ruff-format
|
||||
- id: ruff
|
||||
args: ["--fix", "--exit-non-zero-on-fix"]
|
||||
|
||||
# Static checker for GitHub Actions workflow files.
|
||||
- repo: https://github.com/rhysd/actionlint
|
||||
rev: v1.7.0
|
||||
rev: v1.7.11
|
||||
hooks:
|
||||
- id: actionlint
|
||||
|
|
|
|||
16
.vscode/launch.json
vendored
16
.vscode/launch.json
vendored
|
|
@ -1,16 +0,0 @@
|
|||
{
|
||||
"version": "0.2.0",
|
||||
"configurations": [
|
||||
{
|
||||
"name": "Python Debugger: FastAPI",
|
||||
"type": "debugpy",
|
||||
"request": "launch",
|
||||
"module": "uvicorn",
|
||||
"args": [
|
||||
"app.main:app",
|
||||
"--reload"
|
||||
],
|
||||
"jinja": true
|
||||
}
|
||||
]
|
||||
}
|
||||
115
.vscode/settings.json
vendored
115
.vscode/settings.json
vendored
|
|
@ -1,115 +0,0 @@
|
|||
{
|
||||
"cSpell.words": [
|
||||
"aiofiles",
|
||||
"airbox",
|
||||
"Aiur",
|
||||
"applist",
|
||||
"arcor",
|
||||
"arpa",
|
||||
"asus",
|
||||
"aterm",
|
||||
"blocklist",
|
||||
"blocklists",
|
||||
"brotli",
|
||||
"bthomehub",
|
||||
"bthub",
|
||||
"Ceci",
|
||||
"chartboost",
|
||||
"cloudflared",
|
||||
"collectstatic",
|
||||
"congstar",
|
||||
"datetime",
|
||||
"dbname",
|
||||
"dbpool",
|
||||
"DBSTRING",
|
||||
"easybox",
|
||||
"Eo's",
|
||||
"errorreportsto",
|
||||
"etxr",
|
||||
"feedburner",
|
||||
"feedi",
|
||||
"feedparser",
|
||||
"feedvault",
|
||||
"gaierror",
|
||||
"giga",
|
||||
"githubtest",
|
||||
"godotenv",
|
||||
"gofeed",
|
||||
"gomod",
|
||||
"gorm",
|
||||
"Hiredis",
|
||||
"hitronhub",
|
||||
"homerouter",
|
||||
"hotspot",
|
||||
"htmx",
|
||||
"huaweimobilewifi",
|
||||
"isready",
|
||||
"Itune",
|
||||
"jackc",
|
||||
"joho",
|
||||
"ldflags",
|
||||
"leftright",
|
||||
"levelname",
|
||||
"listparser",
|
||||
"lmao",
|
||||
"localbattle",
|
||||
"localdomain",
|
||||
"lscr",
|
||||
"makemigrations",
|
||||
"malformedurl",
|
||||
"memlock",
|
||||
"meowning",
|
||||
"mmcdole",
|
||||
"Monero",
|
||||
"myfritz",
|
||||
"naturalsize",
|
||||
"nyaa",
|
||||
"Nyanpasu",
|
||||
"Omnis",
|
||||
"orjson",
|
||||
"pacman",
|
||||
"PGHOST",
|
||||
"PGID",
|
||||
"PGPORT",
|
||||
"pgtype",
|
||||
"PGUSER",
|
||||
"pgxpool",
|
||||
"pipx",
|
||||
"Plipp",
|
||||
"Prés",
|
||||
"pressly",
|
||||
"psql",
|
||||
"PUID",
|
||||
"Rawr",
|
||||
"referer",
|
||||
"regexes",
|
||||
"Retour",
|
||||
"Roboto",
|
||||
"routerlogin",
|
||||
"Scotty",
|
||||
"snek",
|
||||
"speedport",
|
||||
"sqlc",
|
||||
"sslmode",
|
||||
"staticfiles",
|
||||
"steamloopback",
|
||||
"stretchr",
|
||||
"stylesheet",
|
||||
"sunt",
|
||||
"tdewolff",
|
||||
"Timestamptz",
|
||||
"tmpl",
|
||||
"tplinkap",
|
||||
"tplinkeap",
|
||||
"tplinkmodem",
|
||||
"tplinkplclogin",
|
||||
"tplinkrepeater",
|
||||
"tplinkwifi",
|
||||
"ulimits",
|
||||
"Veni",
|
||||
"vidi",
|
||||
"webmail",
|
||||
"XOXO",
|
||||
"zerolog"
|
||||
]
|
||||
}
|
||||
67
Dockerfile
67
Dockerfile
|
|
@ -1,67 +0,0 @@
|
|||
# Stage 1: Build the requirements.txt using Poetry
|
||||
FROM python:3.13-slim AS builder
|
||||
|
||||
# Set environment variables for Python
|
||||
ENV PYTHONDONTWRITEBYTECODE 1
|
||||
ENV PYTHONUNBUFFERED 1
|
||||
ENV PATH="${PATH}:/root/.local/bin"
|
||||
|
||||
# Install system dependencies
|
||||
RUN apt-get update && \
|
||||
apt-get install -y --no-install-recommends \
|
||||
curl \
|
||||
git \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
# Install Poetry
|
||||
RUN curl -sSL https://install.python-poetry.org | python3 -
|
||||
|
||||
# Copy only the poetry.lock/pyproject.toml to leverage Docker cache
|
||||
WORKDIR /app
|
||||
COPY pyproject.toml poetry.lock /app/
|
||||
|
||||
# Install dependencies and create requirements.txt
|
||||
RUN poetry self add poetry-plugin-export && poetry export --format=requirements.txt --output=requirements.txt --only=main --without-hashes
|
||||
|
||||
# Stage 2: Install dependencies and run the Django application
|
||||
FROM python:3.13-slim AS runner
|
||||
|
||||
# Set environment variables for Python
|
||||
ENV PYTHONDONTWRITEBYTECODE 1
|
||||
ENV PYTHONUNBUFFERED 1
|
||||
|
||||
# Create a non-root user
|
||||
RUN useradd -ms /bin/bash appuser
|
||||
|
||||
# Install system dependencies
|
||||
RUN apt-get update && \
|
||||
apt-get install -y --no-install-recommends \
|
||||
libpq-dev \
|
||||
git \
|
||||
netcat-openbsd \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
# Copy the generated requirements.txt from the builder stage
|
||||
WORKDIR /app
|
||||
COPY --from=builder /app/requirements.txt /app/
|
||||
|
||||
# Install application dependencies
|
||||
RUN pip install --no-cache-dir -r requirements.txt
|
||||
|
||||
# Copy the rest of the application code
|
||||
COPY . /app/
|
||||
|
||||
# Change ownership of the application directory to the non-root user
|
||||
RUN chown -R appuser:appuser /app
|
||||
|
||||
# Switch to the non-root user
|
||||
USER appuser
|
||||
|
||||
# The port the application will listen on
|
||||
EXPOSE 8000
|
||||
|
||||
# Shared volume for static files, media files, and logs
|
||||
VOLUME ["/app/staticfiles", "/app/media", "/app/data"]
|
||||
|
||||
# Run startup script
|
||||
CMD ["./docker-entrypoint.sh"]
|
||||
|
|
@ -1 +0,0 @@
|
|||
Subproject commit 0225af06e957b5c44e3748687e2469ae9dfb96f2
|
||||
|
|
@ -1,42 +0,0 @@
|
|||
"""https://fastapi.tiangolo.com/tutorial/dependencies/."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from functools import lru_cache
|
||||
from typing import Annotated
|
||||
|
||||
import humanize
|
||||
from fastapi import Depends
|
||||
from reader import Reader, make_reader
|
||||
|
||||
from app.settings import DB_PATH
|
||||
|
||||
|
||||
@lru_cache(maxsize=1)
|
||||
def get_reader() -> Reader:
|
||||
"""Return the reader."""
|
||||
return make_reader(url=DB_PATH.as_posix(), search_enabled=True)
|
||||
|
||||
|
||||
def get_stats() -> str:
|
||||
"""Return the stats."""
|
||||
# db_size: int = DB_PATH.stat().st_size
|
||||
#
|
||||
# # Get the feed counts.
|
||||
# feed_counts: FeedCounts = get_reader().get_feed_counts()
|
||||
# total_feed_counts: int | None = feed_counts.total
|
||||
# if total_feed_counts is None:
|
||||
# total_feed_counts = 0
|
||||
#
|
||||
# # Get the entry counts.
|
||||
# entry_counts: EntryCounts = get_reader().get_entry_counts()
|
||||
# total_entry_counts: int | None = entry_counts.total
|
||||
# if total_entry_counts is None:
|
||||
# total_entry_counts = 0
|
||||
#
|
||||
# return f"{total_feed_counts} feeds ({total_entry_counts} entries) ~{humanize.naturalsize(db_size, binary=True)}"
|
||||
return f"0 feeds (0 entries) ~{humanize.naturalsize(0, binary=True)}"
|
||||
|
||||
|
||||
CommonReader = Annotated[Reader, Depends(get_reader)]
|
||||
CommonStats = Annotated[str, Depends(get_stats)]
|
||||
26
app/main.py
26
app/main.py
|
|
@ -1,26 +0,0 @@
|
|||
from __future__ import annotations
|
||||
|
||||
import uvicorn
|
||||
from fastapi import FastAPI
|
||||
from fastapi.staticfiles import StaticFiles
|
||||
|
||||
from app.routers.api import api_router
|
||||
from app.routers.static import static_router
|
||||
|
||||
app = FastAPI(
|
||||
title="FeedVault API",
|
||||
description="An API for FeedVault.",
|
||||
version="0.1.0",
|
||||
openapi_url="/api/v1/openapi.json",
|
||||
redoc_url=None,
|
||||
debug=True,
|
||||
)
|
||||
|
||||
|
||||
app.mount(path="/static", app=StaticFiles(directory="static"), name="static")
|
||||
app.include_router(router=api_router)
|
||||
app.include_router(router=static_router)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
uvicorn.run(app=app, host="0.0.0.0", port=8000) # noqa: S104
|
||||
|
|
@ -1,61 +0,0 @@
|
|||
from __future__ import annotations
|
||||
|
||||
import datetime # noqa: TCH003
|
||||
from urllib.parse import unquote
|
||||
|
||||
from fastapi import APIRouter
|
||||
from pydantic import BaseModel
|
||||
from reader import ExceptionInfo, Feed, FeedNotFoundError
|
||||
|
||||
from app.dependencies import CommonReader # noqa: TCH001
|
||||
from app.validators import uri_validator
|
||||
|
||||
api_router = APIRouter(
|
||||
prefix="/api/v1",
|
||||
tags=["Feeds"],
|
||||
responses={404: {"description": "Not found"}},
|
||||
)
|
||||
|
||||
|
||||
class FeedOut(BaseModel):
|
||||
"""The feed we return to the user."""
|
||||
|
||||
url: str
|
||||
updated: datetime.datetime | None = None
|
||||
title: str | None = None
|
||||
link: str | None = None
|
||||
author: str | None = None
|
||||
subtitle: str | None = None
|
||||
version: str | None = None
|
||||
user_title: str | None = None
|
||||
added: datetime.datetime | None = None
|
||||
last_updated: datetime.datetime | None = None
|
||||
last_exception: ExceptionInfo | None = None
|
||||
updates_enabled: bool = True
|
||||
|
||||
|
||||
@api_router.get("/feeds", summary="Get all the feeds in the reader.", tags=["Feeds"])
|
||||
async def api_feeds(reader: CommonReader) -> list[Feed]:
|
||||
"""Return all the feeds in the reader."""
|
||||
return list(reader.get_feeds())
|
||||
|
||||
|
||||
@api_router.get(
|
||||
path="/feed/{feed_url:path}",
|
||||
summary="Get a feed from the reader.",
|
||||
tags=["Feeds"],
|
||||
response_model=FeedOut | dict[str, str],
|
||||
response_model_exclude_unset=True,
|
||||
)
|
||||
async def api_feed(feed_url: str, reader: CommonReader) -> Feed | dict[str, str]:
|
||||
"""Return a feed from the reader."""
|
||||
feed_url = unquote(feed_url)
|
||||
|
||||
if not uri_validator(feed_url):
|
||||
return {"message": "Invalid URL."}
|
||||
|
||||
try:
|
||||
feed: Feed = reader.get_feed(feed_url)
|
||||
except FeedNotFoundError as e:
|
||||
return {"message": str(e)}
|
||||
return feed
|
||||
|
|
@ -1,250 +0,0 @@
|
|||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
import os
|
||||
import time
|
||||
from pathlib import Path
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from fastapi import APIRouter, File, Form, Request, UploadFile
|
||||
from fastapi.responses import FileResponse
|
||||
from fastapi.templating import Jinja2Templates
|
||||
from reader import FeedExistsError, InvalidFeedURLError
|
||||
|
||||
from app.dependencies import CommonReader, CommonStats # noqa: TCH001
|
||||
from app.settings import MEDIA_ROOT
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from collections.abc import Iterable
|
||||
|
||||
from fastapi.datastructures import Address
|
||||
from reader import Feed
|
||||
|
||||
|
||||
logger: logging.Logger = logging.getLogger(__name__)
|
||||
|
||||
static_router = APIRouter(tags=["HTML"])
|
||||
templates = Jinja2Templates(directory="templates")
|
||||
|
||||
|
||||
@static_router.get("/favicon.ico", summary="Favicon.", tags=["HTML"])
|
||||
async def favicon(request: Request):
|
||||
"""Favicon."""
|
||||
return FileResponse("static/favicon.ico")
|
||||
|
||||
|
||||
@static_router.get(path="/", summary="Index page.", tags=["HTML"])
|
||||
async def index(request: Request, reader: CommonReader, stats: CommonStats):
|
||||
"""Index page."""
|
||||
feeds: Iterable[Feed] = reader.get_feeds(limit=15)
|
||||
return templates.TemplateResponse(
|
||||
request=request,
|
||||
name="index.html",
|
||||
context={"feeds": feeds, "stats": stats},
|
||||
)
|
||||
|
||||
|
||||
@static_router.get(path="/feeds", summary="Feeds page.", tags=["HTML"])
|
||||
async def feeds(
|
||||
request: Request,
|
||||
reader: CommonReader,
|
||||
stats: CommonStats,
|
||||
next_url: str | None = None,
|
||||
prev_url: str | None = None,
|
||||
):
|
||||
"""Feeds page."""
|
||||
if next_url:
|
||||
feeds = list(reader.get_feeds(starting_after=next_url, limit=15))
|
||||
elif prev_url:
|
||||
feeds = list(reader.get_feeds(starting_after=prev_url, limit=15))
|
||||
else:
|
||||
feeds = list(reader.get_feeds(limit=15))
|
||||
|
||||
# This is the last feed on the page.
|
||||
next_url = feeds[-1].url if feeds else None
|
||||
|
||||
# This is the first feed on the page.
|
||||
prev_url = feeds[0].url if feeds else None
|
||||
|
||||
return templates.TemplateResponse(
|
||||
request=request,
|
||||
name="feeds.html",
|
||||
context={
|
||||
"feeds": feeds,
|
||||
"stats": stats,
|
||||
"next_url": next_url,
|
||||
"prev_url": prev_url,
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
@static_router.get(path="/feed/{feed_url:path}", summary="Feed page.", tags=["HTML"])
|
||||
async def feed(
|
||||
request: Request,
|
||||
feed_url: str,
|
||||
reader: CommonReader,
|
||||
stats: CommonStats,
|
||||
):
|
||||
"""Feed page."""
|
||||
feed: Feed = reader.get_feed(feed_url)
|
||||
entries = list(reader.get_entries(feed=feed.url))
|
||||
return templates.TemplateResponse(
|
||||
request=request,
|
||||
name="feed.html",
|
||||
context={"feed": feed, "entries": entries, "stats": stats},
|
||||
)
|
||||
|
||||
|
||||
@static_router.get(path="/search", summary="Search page.", tags=["HTML"])
|
||||
async def search( # noqa: PLR0913, PLR0917
|
||||
request: Request,
|
||||
q: str,
|
||||
reader: CommonReader,
|
||||
stats: CommonStats,
|
||||
next_feed: str | None = None,
|
||||
next_entry: str | None = None,
|
||||
prev_feed: str | None = None,
|
||||
prev_entry: str | None = None,
|
||||
):
|
||||
"""Search page."""
|
||||
if next_feed and next_entry:
|
||||
entries = list(
|
||||
reader.search_entries(q, starting_after=(next_feed, next_entry), limit=15),
|
||||
)
|
||||
elif prev_feed and prev_entry:
|
||||
entries = list(
|
||||
reader.search_entries(q, starting_after=(prev_feed, prev_entry), limit=15),
|
||||
)
|
||||
else:
|
||||
entries = list(reader.search_entries(q, limit=15))
|
||||
|
||||
# TODO(TheLovinator): We need to show the entries in the search results. # noqa: TD003
|
||||
reader.update_search()
|
||||
|
||||
return templates.TemplateResponse(
|
||||
request=request,
|
||||
name="search.html",
|
||||
context={
|
||||
"query": q,
|
||||
"entries": entries,
|
||||
"stats": stats,
|
||||
"next_feed": next_feed,
|
||||
"next_entry": next_entry,
|
||||
"prev_feed": prev_feed,
|
||||
"prev_entry": prev_entry,
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
@static_router.post(path="/upload", summary="Upload files.", tags=["HTML"])
|
||||
async def upload_files(request: Request, files: list[UploadFile] = File(...)):
|
||||
"""Upload files."""
|
||||
media_root: str = os.getenv(key="MEDIA_ROOT", default=MEDIA_ROOT.as_posix())
|
||||
file_infos: list[dict[str, str]] = []
|
||||
upload_time = int(time.time())
|
||||
|
||||
# Save metadata
|
||||
request_client: Address | None = request.client
|
||||
if request_client:
|
||||
host: str = request_client.host or "unknown"
|
||||
else:
|
||||
host = "unknown"
|
||||
|
||||
metadata = {
|
||||
"upload_time": upload_time,
|
||||
"files": [file.filename for file in files if file.filename],
|
||||
"ip": host,
|
||||
"user_agent": request.headers.get("user-agent") or "unknown",
|
||||
"description": request.headers.get("description") or "No description.",
|
||||
}
|
||||
metadata_path: Path = Path(media_root) / f"{upload_time}.json"
|
||||
metadata_path.parent.mkdir(parents=True, exist_ok=True)
|
||||
metadata_path.write_text(str(metadata))
|
||||
|
||||
# Save uploaded files
|
||||
for file in files:
|
||||
if not file:
|
||||
logger.error("No file uploaded.")
|
||||
continue
|
||||
|
||||
if not file.filename:
|
||||
logger.error("No file name.")
|
||||
continue
|
||||
|
||||
file_path: Path = Path(media_root) / f"{upload_time}" / file.filename
|
||||
|
||||
content: bytes = b""
|
||||
while chunk := await file.read(1024): # Read in chunks of 1024 bytes
|
||||
content += chunk
|
||||
|
||||
file_path.parent.mkdir(parents=True, exist_ok=True)
|
||||
Path(file_path).write_bytes(content)
|
||||
|
||||
file_infos.append({"filename": file.filename})
|
||||
|
||||
return {"files_uploaded": file_infos}
|
||||
|
||||
|
||||
@static_router.get(path="/upload", summary="Upload page.", tags=["HTML"])
|
||||
async def upload_page(request: Request, stats: CommonStats):
|
||||
"""Upload page."""
|
||||
return templates.TemplateResponse(
|
||||
request=request,
|
||||
name="upload.html",
|
||||
context={"stats": stats},
|
||||
)
|
||||
|
||||
|
||||
@static_router.get(path="/contact", summary="Contact page.", tags=["HTML"])
|
||||
async def contact(request: Request, stats: CommonStats):
|
||||
"""Contact page."""
|
||||
return templates.TemplateResponse(
|
||||
request=request,
|
||||
name="contact.html",
|
||||
context={"stats": stats},
|
||||
)
|
||||
|
||||
|
||||
@static_router.post(path="/contact", summary="Contact page.", tags=["HTML"])
|
||||
async def contact_form(request: Request, stats: CommonStats, message: str = Form(...)):
|
||||
"""Contact page."""
|
||||
# TODO(TheLovinator): Send the message to the admin. # noqa: TD003
|
||||
return {
|
||||
"message": message,
|
||||
"stats": stats,
|
||||
}
|
||||
|
||||
|
||||
@static_router.get(path="/add", summary="Add feeds page.", tags=["HTML"])
|
||||
async def add_page(request: Request, stats: CommonStats):
|
||||
"""Add feeds page."""
|
||||
return templates.TemplateResponse(
|
||||
request=request,
|
||||
name="add.html",
|
||||
context={"stats": stats},
|
||||
)
|
||||
|
||||
|
||||
@static_router.post(path="/add", summary="Add feeds page.", tags=["HTML"])
|
||||
async def add_feed(
|
||||
reader: CommonReader,
|
||||
stats: CommonStats,
|
||||
feed_urls: str = Form(...),
|
||||
):
|
||||
"""Add feeds page."""
|
||||
feed_info: list[dict[str, str]] = []
|
||||
# Each line is a feed URL.
|
||||
for feed_url in feed_urls.split("\n"):
|
||||
try:
|
||||
reader.add_feed(feed_url.strip())
|
||||
feed_info.append({"url": feed_url.strip(), "status": "Added"})
|
||||
except FeedExistsError as e:
|
||||
feed_info.append({"url": feed_url.strip(), "status": str(e)})
|
||||
except InvalidFeedURLError as e:
|
||||
feed_info.append({"url": feed_url.strip(), "status": str(e)})
|
||||
|
||||
return {
|
||||
"feed_urls": feed_urls,
|
||||
"stats": stats,
|
||||
"feed_info": feed_info,
|
||||
}
|
||||
|
|
@ -1,35 +0,0 @@
|
|||
"""Scrape https://github.com/rumca-js/RSS-Link-Database for RSS links."""
|
||||
|
||||
from pathlib import Path
|
||||
|
||||
import orjson
|
||||
from click import echo
|
||||
|
||||
|
||||
def scrape() -> str:
|
||||
"""Scrape.
|
||||
|
||||
Raises:
|
||||
FileNotFoundError: If the RSS-Link-Database repository is not found.
|
||||
"""
|
||||
repository_path = Path("RSS-Link-Database")
|
||||
if not repository_path.exists():
|
||||
msg = "RSS-Link-Database repository not found."
|
||||
raise FileNotFoundError(msg)
|
||||
|
||||
rss_links: list[str] = []
|
||||
for file in repository_path.glob("*.json"):
|
||||
echo(f"Scraping {file.name}...")
|
||||
|
||||
with file.open("r", encoding="utf-8") as f:
|
||||
data = orjson.loads(f.read())
|
||||
|
||||
for d in data:
|
||||
if d.get("url"):
|
||||
rss_links.append(d["url"])
|
||||
|
||||
if d.get("link"):
|
||||
rss_links.append(d["link"])
|
||||
|
||||
rss_links = list(set(rss_links))
|
||||
return "\n".join(rss_links)
|
||||
|
|
@ -1,13 +0,0 @@
|
|||
from __future__ import annotations
|
||||
|
||||
from pathlib import Path
|
||||
|
||||
from platformdirs import user_data_dir
|
||||
|
||||
DATA_DIR: str = user_data_dir(
|
||||
appname="FeedVault",
|
||||
appauthor="TheLovinator",
|
||||
roaming=True,
|
||||
)
|
||||
DB_PATH: Path = Path(DATA_DIR) / "reader.sqlite"
|
||||
MEDIA_ROOT: Path = Path(DATA_DIR) / "uploads"
|
||||
|
|
@ -1,17 +0,0 @@
|
|||
from urllib.parse import ParseResult, urlparse
|
||||
|
||||
|
||||
def uri_validator(url: str) -> bool:
|
||||
"""Validate a URI.
|
||||
|
||||
Args:
|
||||
url: The URI to validate.
|
||||
|
||||
Returns:
|
||||
True if the URI is valid, False otherwise.
|
||||
"""
|
||||
try:
|
||||
result: ParseResult = urlparse(url)
|
||||
return all([result.scheme, result.netloc])
|
||||
except AttributeError:
|
||||
return False
|
||||
|
|
@ -1,13 +0,0 @@
|
|||
from __future__ import annotations
|
||||
|
||||
from .add_steam_feeds import add_steam_feeds
|
||||
from .download_steam_ids import download_steam_ids
|
||||
from .grab_links import grab_links
|
||||
from .update_feeds import update_feeds
|
||||
|
||||
__all__: list[str] = [
|
||||
"add_steam_feeds",
|
||||
"download_steam_ids",
|
||||
"grab_links",
|
||||
"update_feeds",
|
||||
]
|
||||
|
|
@ -1,4 +0,0 @@
|
|||
from cli.cli import app
|
||||
|
||||
if __name__ == "__main__":
|
||||
app()
|
||||
|
|
@ -1,53 +0,0 @@
|
|||
import sys
|
||||
import traceback
|
||||
from pathlib import Path
|
||||
|
||||
from reader import ParseError, Reader, StorageError, UpdateError
|
||||
from rich import print
|
||||
|
||||
from app.dependencies import get_reader
|
||||
from app.settings import DATA_DIR
|
||||
from cli.cli import app
|
||||
|
||||
|
||||
@app.command(
    name="add_steam_feeds",
    help="Add Steam feeds to the reader. Needs 'download_steam_ids' to be run first.",
)
def add_steam_feeds() -> None:
    """Add the ids from "steam_ids.txt" to the reader.

    Reads one Steam app id per line from <DATA_DIR>/steam_ids.txt and
    registers the corresponding store-news feed with the reader. Errors on
    individual feeds are reported and skipped; Ctrl-C aborts the whole run.
    """
    reader: Reader = get_reader()
    print("Adding Steam feeds...")

    file_path: Path = Path(DATA_DIR) / "steam_ids.txt"
    if not file_path.exists():
        print("File not found.")
        return

    with file_path.open("r", encoding="utf-8") as f:
        steam_ids: list[str] = f.read().splitlines()

    added = 0
    # Start the counter at 1 so progress reads [1/N] .. [N/N] instead of the
    # off-by-one [0/N] .. [N-1/N].
    for count, steam_id in enumerate(steam_ids, start=1):
        try:
            reader.add_feed(f"https://store.steampowered.com/feeds/news/app/{steam_id}")
            added += 1
            print(f"[{count}/{len(steam_ids)}] Added feed: {steam_id}")

        except ParseError as e:
            print(f"[bold red]Error parsing feed[/bold red] ({e})")

        except (UpdateError, StorageError) as e:
            print(f"[bold red]Error updating feed[/bold red] ({e})")

        except AssertionError as e:
            print(f"[bold red]Assertion error[/bold red] ({e})")
            traceback.print_exc(file=sys.stderr)

        except KeyboardInterrupt:
            print("[bold red]Keyboard interrupt[/bold red]")
            reader.close()
            sys.exit(1)

    # Report how many feeds were actually added, not just how many ids were read.
    print(f"Added {added} Steam feeds.")
|
||||
|
|
@ -1,6 +0,0 @@
|
|||
import typer
|
||||
|
||||
# Single Typer application instance shared by every CLI sub-command module.
app = typer.Typer(
    name="FeedVault CLI",
    no_args_is_help=True,  # print help instead of erroring when invoked bare
)
|
||||
|
|
@ -1,32 +0,0 @@
|
|||
from pathlib import Path
|
||||
|
||||
import requests
|
||||
from rich import print
|
||||
|
||||
from app.settings import DATA_DIR
|
||||
from cli.cli import app
|
||||
|
||||
|
||||
@app.command(
    name="download_steam_ids",
    help="Download Steam IDs from the Steam API.",
)
def download_steam_ids() -> None:
    """Download Steam IDs from "https://api.steampowered.com/ISteamApps/GetAppList/v2/".

    Fetches the full Steam app list and writes one app id per line to
    <DATA_DIR>/steam_ids.txt, overwriting any previous file.
    """
    print("Downloading Steam IDs...")

    r: requests.Response = requests.get(
        "https://api.steampowered.com/ISteamApps/GetAppList/v2/",
        timeout=10,
    )
    r.raise_for_status()

    data: dict[str, dict[str, list[dict[str, str]]]] = r.json()
    app_ids: list[dict[str, str]] = data["applist"]["apps"]

    file_path: Path = Path(DATA_DIR) / "steam_ids.txt"
    with file_path.open("w", encoding="utf-8") as f:
        for steam_app in app_ids:
            # Single quotes inside the f-string keep this valid on Python < 3.12,
            # where reusing the outer quote character is a syntax error (PEP 701).
            f.write(f"{steam_app['appid']}\n")

    print(f"Steam IDs downloaded. {len(app_ids)} IDs saved to {file_path}.")
|
||||
|
|
@ -1,15 +0,0 @@
|
|||
from rich import print
|
||||
|
||||
from app.scrapers.rss_link_database import scrape
|
||||
from cli.cli import app
|
||||
|
||||
|
||||
@app.command(
    name="grab_links",
    help="Grab RSS feeds from different sources.",
)
def grab_links() -> None:
    """Grab RSS feeds from different sources.

    Runs the RSS-link scraper and prints the newline-separated links it found.
    """
    print("Grabbing links...")
    found_links: str = scrape()
    print(found_links)
|
||||
|
|
@ -1,92 +0,0 @@
|
|||
import sys
|
||||
import traceback
|
||||
from concurrent.futures import ThreadPoolExecutor
|
||||
from pathlib import Path
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from reader import (
|
||||
Feed,
|
||||
ParseError,
|
||||
Reader,
|
||||
StorageError,
|
||||
UpdatedFeed,
|
||||
UpdateError,
|
||||
UpdateResult,
|
||||
)
|
||||
from rich import print
|
||||
|
||||
from app.dependencies import get_reader
|
||||
from cli.cli import app
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from collections.abc import Iterable
|
||||
|
||||
|
||||
def _add_broken_feed_to_csv(feed: Feed | UpdateResult | None) -> None:
    """Append the URL of a broken feed to "broken_feeds.csv" in the working directory."""
    if feed is None:
        print("Feed is None.")
        return

    csv_path = Path("broken_feeds.csv")
    with csv_path.open("a", encoding="utf-8") as out:
        out.write(f"{feed.url}\n")
|
||||
|
||||
|
||||
@app.command(
    name="update_feeds",
    help="Update all the feeds.",
)
def update_feeds() -> None:
    """Update all the feeds.

    Iterates every feed that is not broken, has updates enabled, and is
    marked new, updating each one on a thread pool. Per-feed errors are
    reported; feeds that trip an assertion are disabled and appended to
    broken_feeds.csv.
    """
    reader: Reader = get_reader()
    print("Updating feeds...")

    feeds: Iterable[Feed] = reader.get_feeds(
        broken=False,
        updates_enabled=True,
        new=True,
    )

    # NOTE(review): this count omits the new=True filter used above, so it may
    # exceed the number of feeds actually iterated — confirm intended.
    total_feeds: int | None = reader.get_feed_counts(
        broken=False,
        updates_enabled=True,
    ).total
    if not total_feeds:
        print("[bold red]No feeds to update[/bold red]")
        return

    print(f"Feeds to update: {total_feeds}")

    def update_feed(feed: Feed) -> None:
        # Worker run on the thread pool; closes over `reader`.
        try:
            # update_feed() may return None (no update applied) — see reader docs.
            updated_feed: UpdatedFeed | None = reader.update_feed(feed)
            if updated_feed is not None:
                print(
                    f"New: [green]{updated_feed.new}[/green], modified: [yellow]{updated_feed.modified}[/yellow], unmodified: {updated_feed.unmodified} - {feed.url}",  # noqa: E501
                )

        except ParseError as e:
            print(f"[bold red]Error parsing feed[/bold red]: {feed.url} ({e})")

        except UpdateError as e:
            print(f"[bold red]Error updating feed[/bold red]: {feed.url} ({e})")

        except StorageError as e:
            print(f"[bold red]Error updating feed[/bold red]: {feed.url}")
            print(f"[bold red]Storage error[/bold red]: {e}")

        except AssertionError:
            # Assertion failures get the feed permanently disabled and logged
            # so the next run skips it.
            print(f"[bold red]Assertion error[/bold red]: {feed.url}")
            traceback.print_exc(file=sys.stderr)
            reader.disable_feed_updates(feed)
            _add_broken_feed_to_csv(feed)

        except KeyboardInterrupt:
            print("[bold red]Keyboard interrupt[/bold red]")
            reader.close()
            sys.exit(1)

    # 50 threads: presumably network-bound work, so the GIL is not a bottleneck
    # — TODO confirm against reader's thread-safety guarantees.
    with ThreadPoolExecutor(max_workers=50) as executor:
        executor.map(update_feed, feeds)

    # NOTE(review): reports the pre-run count even if some updates failed.
    print(f"Updated {total_feeds} feeds.")
|
||||
15
config/asgi.py
Normal file
15
config/asgi.py
Normal file
|
|
@ -0,0 +1,15 @@
|
|||
"""ASGI config for config project.
|
||||
|
||||
It exposes the ASGI callable as a module-level variable named ``application``.
|
||||
|
||||
For more information on this file, see
|
||||
https://docs.djangoproject.com/en/6.0/howto/deployment/asgi/
|
||||
"""
|
||||
|
||||
import os
|
||||
|
||||
from django.core.asgi import get_asgi_application
|
||||
|
||||
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings")
|
||||
|
||||
application = get_asgi_application()
|
||||
15
config/celery.py
Normal file
15
config/celery.py
Normal file
|
|
@ -0,0 +1,15 @@
|
|||
import os
|
||||
|
||||
from celery import Celery
|
||||
|
||||
# Set the default Django settings module for the 'celery' program.
|
||||
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings")
|
||||
|
||||
app = Celery("config")
|
||||
|
||||
# Using a string here means the worker doesn't have to serialize
|
||||
# the configuration object to child processes.
|
||||
app.config_from_object("django.conf:settings", namespace="CELERY")
|
||||
|
||||
# Load task modules from all registered Django apps.
|
||||
app.autodiscover_tasks()
|
||||
229
config/settings.py
Normal file
229
config/settings.py
Normal file
|
|
@ -0,0 +1,229 @@
|
|||
import logging
|
||||
import os
|
||||
import sys
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
|
||||
import sentry_sdk
|
||||
from dotenv import load_dotenv
|
||||
from platformdirs import user_data_dir
|
||||
|
||||
logger: logging.Logger = logging.getLogger("feedvault.settings")
|
||||
|
||||
load_dotenv(verbose=True)
|
||||
|
||||
# Environment values (case-insensitive, whitespace-stripped) treated as True.
TRUE_VALUES: set[str] = {"1", "true", "yes", "y", "on"}


def env_bool(key: str, *, default: bool = False) -> bool:
    """Read a boolean from the environment, accepting common truthy values.

    Any set value not in TRUE_VALUES (after stripping and lower-casing)
    parses as False.

    Returns:
        bool: Parsed boolean value or the provided default when unset.
    """
    raw: str | None = os.getenv(key)
    return default if raw is None else raw.strip().lower() in TRUE_VALUES
|
||||
|
||||
|
||||
def env_int(key: str, default: int) -> int:
    """Read an integer from the environment with a fallback default.

    A set-but-non-numeric value propagates the ValueError from int().

    Returns:
        int: Parsed integer value or the provided default when unset.
    """
    raw: str | None = os.getenv(key)
    if raw is None:
        return default
    return int(raw)
|
||||
|
||||
|
||||
DEBUG: bool = env_bool(key="DEBUG", default=True)
|
||||
TESTING: bool = (
|
||||
env_bool(key="TESTING", default=False)
|
||||
or "test" in sys.argv
|
||||
or "PYTEST_VERSION" in os.environ
|
||||
)
|
||||
|
||||
|
||||
def get_data_dir() -> Path:
    r"""Get the directory where the application data will be stored.

    This directory is created if it does not exist.

    Returns:
        Path: The directory where the application data will be stored.

    For example, on Windows, it might be:
    `C:\Users\lovinator\AppData\Roaming\TheLovinator\FeedVault`

    In this directory, application data such as media and static files will be stored.
    """
    # ensure_exists=True makes platformdirs create the directory on first use.
    return Path(
        user_data_dir(
            appname="FeedVault",
            appauthor="TheLovinator",
            roaming=True,
            ensure_exists=True,
        )
    )
|
||||
|
||||
|
||||
DATA_DIR: Path = get_data_dir()
|
||||
|
||||
ADMINS: list[tuple[str, str]] = [("Joakim Hellsén", "tlovinator@gmail.com")]
|
||||
BASE_DIR: Path = Path(__file__).resolve().parent.parent
|
||||
ROOT_URLCONF = "config.urls"
|
||||
SECRET_KEY: str = os.getenv("DJANGO_SECRET_KEY", default="")
|
||||
if not SECRET_KEY:
|
||||
logger.error("DJANGO_SECRET_KEY environment variable is not set.")
|
||||
sys.exit(1)
|
||||
|
||||
DEFAULT_FROM_EMAIL: str | None = os.getenv(key="EMAIL_HOST_USER", default=None)
|
||||
EMAIL_HOST: str = os.getenv(key="EMAIL_HOST", default="smtp.gmail.com")
|
||||
EMAIL_HOST_PASSWORD: str | None = os.getenv(key="EMAIL_HOST_PASSWORD", default=None)
|
||||
EMAIL_HOST_USER: str | None = os.getenv(key="EMAIL_HOST_USER", default=None)
|
||||
EMAIL_PORT: int = env_int(key="EMAIL_PORT", default=587)
|
||||
EMAIL_SUBJECT_PREFIX = "[FeedVault] "
|
||||
EMAIL_TIMEOUT: int = env_int(key="EMAIL_TIMEOUT", default=10)
|
||||
EMAIL_USE_LOCALTIME = True
|
||||
EMAIL_USE_TLS: bool = env_bool(key="EMAIL_USE_TLS", default=True)
|
||||
EMAIL_USE_SSL: bool = env_bool(key="EMAIL_USE_SSL", default=False)
|
||||
SERVER_EMAIL: str | None = os.getenv(key="EMAIL_HOST_USER", default=None)
|
||||
|
||||
LOGIN_REDIRECT_URL = "/"
|
||||
LOGIN_URL = "/accounts/login/"
|
||||
LOGOUT_REDIRECT_URL = "/"
|
||||
|
||||
ACCOUNT_EMAIL_VERIFICATION = "none"
|
||||
ACCOUNT_AUTHENTICATION_METHOD = "username"
|
||||
ACCOUNT_EMAIL_REQUIRED = False
|
||||
|
||||
MEDIA_ROOT: Path = DATA_DIR / "media"
|
||||
MEDIA_ROOT.mkdir(exist_ok=True)
|
||||
MEDIA_URL = "/media/"
|
||||
|
||||
STATIC_ROOT: Path = DATA_DIR / "staticfiles"
|
||||
STATIC_ROOT.mkdir(exist_ok=True)
|
||||
STATIC_URL = "/static/"
|
||||
STATICFILES_DIRS: list[Path] = [BASE_DIR / "static"]
|
||||
|
||||
TIME_ZONE = "UTC"
|
||||
WSGI_APPLICATION = "config.wsgi.application"
|
||||
|
||||
INTERNAL_IPS: list[str] = []
|
||||
if DEBUG:
|
||||
INTERNAL_IPS = ["127.0.0.1", "localhost"] # pyright: ignore[reportConstantRedefinition]
|
||||
|
||||
ALLOWED_HOSTS: list[str] = [".localhost", "127.0.0.1", "[::1]", "testserver"]
|
||||
if not DEBUG:
|
||||
ALLOWED_HOSTS = ["feedvault.se"] # pyright: ignore[reportConstantRedefinition]
|
||||
|
||||
LOGGING: dict[str, Any] = {
|
||||
"version": 1,
|
||||
"disable_existing_loggers": False,
|
||||
"handlers": {"console": {"level": "DEBUG", "class": "logging.StreamHandler"}},
|
||||
"loggers": {
|
||||
"": {"handlers": ["console"], "level": "INFO", "propagate": True},
|
||||
"feedvault": {"handlers": ["console"], "level": "DEBUG", "propagate": False},
|
||||
"django": {"handlers": ["console"], "level": "INFO", "propagate": False},
|
||||
"django.utils.autoreload": {
|
||||
"handlers": ["console"],
|
||||
"level": "INFO",
|
||||
"propagate": True,
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
INSTALLED_APPS: list[str] = [
|
||||
# Django built-in apps
|
||||
"django.contrib.contenttypes",
|
||||
"django.contrib.sessions",
|
||||
"django.contrib.staticfiles",
|
||||
"django.contrib.postgres",
|
||||
# Internal apps
|
||||
"twitch.apps.TwitchConfig",
|
||||
"kick.apps.KickConfig",
|
||||
"youtube.apps.YoutubeConfig",
|
||||
"core.apps.CoreConfig",
|
||||
# Third-party apps
|
||||
"django_celery_results",
|
||||
"django_celery_beat",
|
||||
]
|
||||
|
||||
MIDDLEWARE: list[str] = [
|
||||
"django.middleware.security.SecurityMiddleware",
|
||||
"django.contrib.sessions.middleware.SessionMiddleware",
|
||||
"django.middleware.common.CommonMiddleware",
|
||||
"django.middleware.csrf.CsrfViewMiddleware",
|
||||
]
|
||||
|
||||
|
||||
TEMPLATES: list[dict[str, Any]] = [
|
||||
{
|
||||
"BACKEND": "django.template.backends.django.DjangoTemplates",
|
||||
"DIRS": [BASE_DIR / "templates"],
|
||||
"APP_DIRS": True,
|
||||
"OPTIONS": {
|
||||
"context_processors": [
|
||||
"django.template.context_processors.debug",
|
||||
"django.template.context_processors.request",
|
||||
],
|
||||
},
|
||||
},
|
||||
]
|
||||
|
||||
DATABASES: dict[str, dict[str, Any]] = (
|
||||
{"default": {"ENGINE": "django.db.backends.sqlite3", "NAME": ":memory:"}}
|
||||
if TESTING
|
||||
else {
|
||||
"default": {
|
||||
"ENGINE": "django.db.backends.postgresql",
|
||||
"NAME": os.getenv("POSTGRES_DB", "feedvault"),
|
||||
"USER": os.getenv("POSTGRES_USER", "feedvault"),
|
||||
"PASSWORD": os.getenv("POSTGRES_PASSWORD", ""),
|
||||
"HOST": os.getenv("POSTGRES_HOST", "localhost"),
|
||||
"PORT": env_int("POSTGRES_PORT", 5432),
|
||||
"CONN_MAX_AGE": env_int("CONN_MAX_AGE", 60),
|
||||
"CONN_HEALTH_CHECKS": env_bool("CONN_HEALTH_CHECKS", default=True),
|
||||
"OPTIONS": {"connect_timeout": env_int("DB_CONNECT_TIMEOUT", 10)},
|
||||
},
|
||||
}
|
||||
)
|
||||
|
||||
if not TESTING:
|
||||
INSTALLED_APPS = [*INSTALLED_APPS, "debug_toolbar", "silk"] # pyright: ignore[reportConstantRedefinition]
|
||||
MIDDLEWARE = [ # pyright: ignore[reportConstantRedefinition]
|
||||
"debug_toolbar.middleware.DebugToolbarMiddleware",
|
||||
"silk.middleware.SilkyMiddleware",
|
||||
*MIDDLEWARE,
|
||||
]
|
||||
|
||||
if not DEBUG:
|
||||
sentry_sdk.init(
|
||||
dsn="https://1aa1ac672090fb795783de0e90a2b19f@o4505228040339456.ingest.us.sentry.io/4511055670738944",
|
||||
send_default_pii=True,
|
||||
enable_logs=True,
|
||||
traces_sample_rate=1.0,
|
||||
profile_session_sample_rate=1.0,
|
||||
profile_lifecycle="trace",
|
||||
)
|
||||
|
||||
REDIS_URL_CACHE: str = os.getenv(
|
||||
key="REDIS_URL_CACHE",
|
||||
default="redis://localhost:6379/0",
|
||||
)
|
||||
REDIS_URL_CELERY: str = os.getenv(
|
||||
key="REDIS_URL_CELERY",
|
||||
default="redis://localhost:6379/1",
|
||||
)
|
||||
|
||||
CACHES: dict[str, dict[str, str]] = {
|
||||
"default": {
|
||||
"BACKEND": "django.core.cache.backends.redis.RedisCache",
|
||||
"LOCATION": REDIS_URL_CACHE,
|
||||
},
|
||||
}
|
||||
|
||||
CELERY_BROKER_URL: str = REDIS_URL_CELERY
|
||||
CELERY_RESULT_BACKEND = "django-db"
|
||||
CELERY_RESULT_EXTENDED = True
|
||||
CELERY_BEAT_SCHEDULER = "django_celery_beat.schedulers:DatabaseScheduler"
|
||||
23
config/urls.py
Normal file
23
config/urls.py
Normal file
|
|
@ -0,0 +1,23 @@
|
|||
"""URL configuration for config project.
|
||||
|
||||
The `urlpatterns` list routes URLs to views. For more information please see:
|
||||
https://docs.djangoproject.com/en/6.0/topics/http/urls/
|
||||
|
||||
Examples:
|
||||
Function views
|
||||
1. Add an import: from my_app import views
|
||||
2. Add a URL to urlpatterns: path('', views.home, name='home')
|
||||
Class-based views
|
||||
1. Add an import: from other_app.views import Home
|
||||
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
|
||||
Including another URLconf
|
||||
1. Import the include() function: from django.urls import include, path
|
||||
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
|
||||
"""
|
||||
|
||||
from django.contrib import admin
|
||||
from django.urls import path
|
||||
|
||||
urlpatterns = [
|
||||
path("admin/", admin.site.urls),
|
||||
]
|
||||
15
config/wsgi.py
Normal file
15
config/wsgi.py
Normal file
|
|
@ -0,0 +1,15 @@
|
|||
"""WSGI config for config project.
|
||||
|
||||
It exposes the WSGI callable as a module-level variable named ``application``.
|
||||
|
||||
For more information on this file, see
|
||||
https://docs.djangoproject.com/en/6.0/howto/deployment/wsgi/
|
||||
"""
|
||||
|
||||
import os
|
||||
|
||||
from django.core.wsgi import get_wsgi_application
|
||||
|
||||
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings")
|
||||
|
||||
application = get_wsgi_application()
|
||||
|
|
@ -1,33 +0,0 @@
|
|||
version: "3"
|
||||
services:
|
||||
# feedvault:
|
||||
# container_name: feedvault
|
||||
# image: ghcr.io/thelovinator1/feedvault:latest
|
||||
# user: "1000:1000"
|
||||
# restart: always
|
||||
# environment:
|
||||
# - DEBUG=False
|
||||
# - SECRET_KEY=${SECRET_KEY}
|
||||
# - DB_NAME=feedvault
|
||||
# - DB_USER=feedvault
|
||||
# - DB_PASSWORD=${DB_PASSWORD}
|
||||
# - DB_HOST=feedvault_postgres
|
||||
# - DB_PORT=5432
|
||||
# - REDIS_PASSWORD=${REDIS_PASSWORD}
|
||||
# - REDIS_PORT=6379
|
||||
# - REDIS_HOST=garnet
|
||||
# volumes:
|
||||
# - /Docker/FeedVault/FeedVault/staticfiles:/app/staticfiles
|
||||
# - /mnt/Fourteen/Docker/FeedVault/media:/app/media
|
||||
garnet:
|
||||
container_name: garnet
|
||||
image: "ghcr.io/microsoft/garnet"
|
||||
user: "1000:1000"
|
||||
restart: always
|
||||
ulimits:
|
||||
memlock: -1
|
||||
command: ["--auth", "Password", "--password", "${REDIS_PASSWORD}"]
|
||||
ports:
|
||||
- "6379:6379"
|
||||
volumes:
|
||||
- /Docker/FeedVault/Garnet:/data
|
||||
|
|
@ -1,31 +0,0 @@
|
|||
#!/bin/sh
|
||||
|
||||
# Exit on error
|
||||
set -e
|
||||
|
||||
# Debug
|
||||
set -x
|
||||
|
||||
# 1. Collect static files
|
||||
echo "Collect static files"
|
||||
python manage.py collectstatic --noinput
|
||||
echo "Collect static files done"
|
||||
|
||||
# 2. Apply database migrations
|
||||
echo "Apply database migrations"
|
||||
python manage.py migrate
|
||||
echo "Apply database migrations done"
|
||||
|
||||
# 3. Create cache table
|
||||
echo "Create cache table"
|
||||
python manage.py createcachetable
|
||||
echo "Create cache table done"
|
||||
|
||||
# https://docs.gunicorn.org/en/stable/design.html#how-many-workers
|
||||
num_cores=$(nproc --all)
|
||||
workers=$((2 * num_cores + 1))
|
||||
|
||||
# 4. Start server
|
||||
echo "Starting server with $workers workers"
|
||||
gunicorn --workers=$workers --bind=0.0.0.0:8000 feedvault.wsgi:application --log-level=info --access-logfile=- --error-logfile=- --forwarded-allow-ips="172.*,192.*" --proxy-allow-from="172.*,192.*"
|
||||
echo "Bye, love you"
|
||||
28
manage.py
Executable file
28
manage.py
Executable file
|
|
@ -0,0 +1,28 @@
|
|||
#!/usr/bin/env python
|
||||
"""Django's command-line utility for administrative tasks."""
|
||||
|
||||
import os
|
||||
import sys
|
||||
|
||||
|
||||
def main() -> None:
    """Run administrative tasks.

    Points DJANGO_SETTINGS_MODULE at config.settings (unless already set)
    and hands sys.argv to Django's command-line dispatcher.

    Raises:
        ImportError: If Django cannot be imported.
    """
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings")
    try:
        from django.core.management import execute_from_command_line  # noqa: PLC0415
    except ImportError as exc:
        hint = (
            "Couldn't import Django. Are you sure it's installed and "
            "available on your PYTHONPATH environment variable? Did you "
            "forget to activate a virtual environment?"
        )
        raise ImportError(hint) from exc
    execute_from_command_line(sys.argv)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
125
pyproject.toml
125
pyproject.toml
|
|
@ -1,28 +1,117 @@
|
|||
[project]
|
||||
name = "feedvault"
|
||||
version = "0.1.0"
|
||||
description = "Add your description here"
|
||||
readme = "README.md"
|
||||
requires-python = ">=3.14"
|
||||
dependencies = [
|
||||
"celery",
|
||||
"django-auto-prefetch",
|
||||
"django-celery-beat",
|
||||
"django-celery-results",
|
||||
"django-debug-toolbar",
|
||||
"django-silk",
|
||||
"django",
|
||||
"flower",
|
||||
"gunicorn",
|
||||
"hiredis",
|
||||
"index-now-for-python",
|
||||
"platformdirs",
|
||||
"psycopg[binary]",
|
||||
"python-dotenv",
|
||||
"redis",
|
||||
"sentry-sdk",
|
||||
"setproctitle",
|
||||
"sitemap-parser",
|
||||
]
|
||||
|
||||
[dependency-groups]
|
||||
dev = [
|
||||
"celery-types",
|
||||
"django-stubs",
|
||||
"djlint",
|
||||
"hypothesis[django]",
|
||||
"pytest-cov",
|
||||
"pytest-django",
|
||||
"pytest-randomly",
|
||||
"pytest-xdist[psutil]",
|
||||
"pytest",
|
||||
]
|
||||
[tool.pytest.ini_options]
|
||||
DJANGO_SETTINGS_MODULE = "config.settings"
|
||||
python_files = ["test_*.py", "*_test.py"]
|
||||
|
||||
[tool.ruff]
|
||||
target-version = "py312"
|
||||
fix = true
|
||||
unsafe-fixes = true
|
||||
preview = true
|
||||
line-length = 120
|
||||
# (duplicate "unsafe-fixes" key removed — it is already set above; repeating a key is invalid TOML)
|
||||
|
||||
format.docstring-code-format = true
|
||||
format.preview = true
|
||||
|
||||
lint.future-annotations = true
|
||||
lint.isort.force-single-line = true
|
||||
lint.pycodestyle.ignore-overlong-task-comments = true
|
||||
lint.pydocstyle.convention = "google"
|
||||
lint.select = ["ALL"]
|
||||
|
||||
# Don't automatically remove unused variables
|
||||
lint.unfixable = ["F841"]
|
||||
|
||||
lint.ignore = [
|
||||
"ANN201", # Checks that public functions and methods have return type annotations.
|
||||
"ARG001", # Checks for the presence of unused arguments in function definitions.
|
||||
"B008", # Checks for function calls in default function arguments.
|
||||
"CPY001", # Checks for the absence of copyright notices within Python files.
|
||||
"D100", # Checks for undocumented public module definitions.
|
||||
"D104", # Checks for undocumented public package definitions.
|
||||
"FIX002", # Checks for "TODO" comments.
|
||||
"RUF029", # Checks for functions declared async that do not await or otherwise use features requiring the function to be declared async.
|
||||
"ERA001", # Checks for commented-out Python code.
|
||||
"ANN002", # Checks that function *args arguments have type annotations.
|
||||
"ANN003", # Checks that function **kwargs arguments have type annotations.
|
||||
"C901", # Checks for functions with a high McCabe complexity.
|
||||
"CPY001", # Checks for the absence of copyright notices within Python files.
|
||||
"D100", # Checks for undocumented public module definitions.
|
||||
"D104", # Checks for undocumented public package definitions.
|
||||
"D105", # Checks for undocumented magic method definitions.
|
||||
"D106", # Checks for undocumented public class definitions, for nested classes.
|
||||
"E501", # Checks for lines that exceed the specified maximum character length.
|
||||
"ERA001", # Checks for commented-out Python code.
|
||||
"FIX002", # Checks for "TODO" comments.
|
||||
"PLR0911", # Checks for functions or methods with too many return statements.
|
||||
"PLR0912", # Checks for functions or methods with too many branches, including (nested) if, elif, and else branches, for loops, try-except clauses, and match and case statements.
|
||||
"PLR6301", # Checks for the presence of unused self parameter in methods definitions.
|
||||
"RUF012", # Checks for mutable default values in class attributes.
|
||||
"ARG001", # Checks for the presence of unused arguments in function definitions.
|
||||
|
||||
# Conflicting lint rules when using Ruff's formatter
|
||||
# https://docs.astral.sh/ruff/formatter/#conflicting-lint-rules
|
||||
"COM812", # Checks for the absence of trailing commas.
|
||||
"COM819", # Checks for the presence of prohibited trailing commas.
|
||||
"D206", # Checks for docstrings that are indented with tabs.
|
||||
"D300", # Checks for docstrings that use '''triple single quotes''' instead of """triple double quotes""".
|
||||
"E111", # Checks for indentation with a non-multiple of 4 spaces.
|
||||
"E114", # Checks for indentation of comments with a non-multiple of 4 spaces.
|
||||
"E117", # Checks for over-indented code.
|
||||
"ISC001", # Checks for implicitly concatenated strings on a single line.
|
||||
"ISC002", # Checks for implicitly concatenated strings that span multiple lines.
|
||||
"Q000", # Checks for inline strings that use single quotes or double quotes, depending on the value of the lint.flake8-quotes.inline-quotes option.
|
||||
"Q001", # Checks for multiline strings that use single quotes or double quotes, depending on the value of the lint.flake8-quotes.multiline-quotes setting.
|
||||
"Q002", # Checks for docstrings that use single quotes or double quotes, depending on the value of the lint.flake8-quotes.docstring-quotes setting.
|
||||
"Q003", # Checks for strings that include escaped quotes, and suggests changing the quote style to avoid the need to escape them.
|
||||
"W191", # Checks for indentation that uses tabs.
|
||||
]
|
||||
|
||||
[tool.ruff.lint.per-file-ignores]
|
||||
"tests/**/*.py" = ["S101", "ARG", "FBT", "PLR2004", "S311"]
|
||||
|
||||
# (duplicate [tool.ruff.lint.pydocstyle] table removed — the convention is already
# set via lint.pydocstyle.convention above, and redefining the table is invalid TOML)
|
||||
"**/tests/**" = [
|
||||
"ARG",
|
||||
"FBT",
|
||||
"PLR0904",
|
||||
"PLR2004",
|
||||
"PLR6301",
|
||||
"S101",
|
||||
"S105",
|
||||
"S106",
|
||||
"S311",
|
||||
"SLF001",
|
||||
]
|
||||
"**/migrations/**" = ["RUF012"]
|
||||
|
||||
[tool.djlint]
|
||||
profile = "jinja"
|
||||
format_attribute_template_tags = true
|
||||
profile = "django"
|
||||
ignore = "H021,H030"
|
||||
|
||||
[tool.uv.sources]
|
||||
sitemap-parser = { git = "https://github.com/TheLovinator1/sitemap-parser.git" }
|
||||
|
|
|
|||
|
|
@ -1,5 +0,0 @@
|
|||
ruff
|
||||
djlint
|
||||
pre-commit
|
||||
pytest
|
||||
httpx
|
||||
|
|
@ -1,8 +0,0 @@
|
|||
fastapi
|
||||
humanize
|
||||
jinja2
|
||||
python-dotenv
|
||||
python-multipart
|
||||
reader
|
||||
orjson
|
||||
typer
|
||||
|
|
@ -1,16 +0,0 @@
|
|||
{% extends "base.html" %}
|
||||
{% block content %}
|
||||
<h2>Add feeds</h2>
|
||||
<form method="post" action='{{ url_for("add_feed") }}'>
|
||||
<p>
|
||||
<label for="feed_urls">Feed URLs</label>
|
||||
<textarea id="feed_urls"
|
||||
name="feed_urls"
|
||||
rows="4"
|
||||
cols="50"
|
||||
required
|
||||
placeholder="Enter the URLs of the feeds you want to add"></textarea>
|
||||
</p>
|
||||
<button type="submit">Add feeds</button>
|
||||
</form>
|
||||
{% endblock content %}
|
||||
|
|
@ -1,144 +0,0 @@
|
|||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="UTF-8" />
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
|
||||
{% if description %}<meta name="description" content="{{ description }}" />{% endif %}
|
||||
{% if keywords %}<meta name="keywords" content="{{ keywords }}" />{% endif %}
|
||||
{% if author %}<meta name="author" content="{{ author }}" />{% endif %}
|
||||
{% if canonical %}<link rel="canonical" href="{{ canonical }}" />{% endif %}
|
||||
<title>{{ title | default("FeedVault") }}</title>
|
||||
<style>
|
||||
html {
|
||||
max-width: 88ch;
|
||||
padding: calc(1vmin + 0.5rem);
|
||||
margin-inline: auto;
|
||||
font-size: clamp(1em, 0.909em + 0.45vmin, 1.25em);
|
||||
font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto,
|
||||
Helvetica, Arial, sans-serif;
|
||||
color-scheme: light dark;
|
||||
}
|
||||
|
||||
h1 {
|
||||
font-size: 2.5rem;
|
||||
font-weight: 600;
|
||||
margin: 0;
|
||||
}
|
||||
|
||||
header {
|
||||
text-align: center;
|
||||
}
|
||||
|
||||
.search {
|
||||
display: flex;
|
||||
justify-content: center;
|
||||
margin-top: 1rem;
|
||||
margin-inline: auto;
|
||||
}
|
||||
|
||||
.leftright {
|
||||
display: flex;
|
||||
justify-content: center;
|
||||
}
|
||||
|
||||
.left {
|
||||
margin-right: auto;
|
||||
}
|
||||
|
||||
.right {
|
||||
margin-left: auto;
|
||||
}
|
||||
|
||||
textarea {
|
||||
width: 100%;
|
||||
height: 10rem;
|
||||
resize: vertical;
|
||||
}
|
||||
|
||||
.messages {
|
||||
list-style-type: none;
|
||||
}
|
||||
|
||||
.error {
|
||||
color: red;
|
||||
}
|
||||
|
||||
.success {
|
||||
color: green;
|
||||
}
|
||||
|
||||
.warning {
|
||||
color: orange;
|
||||
}
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
{% if messages %}
|
||||
<ul class="messages" role="alert" aria-live="polite">
|
||||
{% for message in messages %}
|
||||
<li {% if message.tags %}class="{{ message.tags }}"{% endif %}
|
||||
role="alert">{{ message }}</li>
|
||||
{% endfor %}
|
||||
</ul>
|
||||
{% endif %}
|
||||
<header>
|
||||
<h1>
|
||||
<a href='{{ url_for("index") }}' aria-label="FeedVault Home">FeedVault</a>
|
||||
</h1>
|
||||
</header>
|
||||
<div class="leftright">
|
||||
<div class="left">
|
||||
<small>Archive of
|
||||
<a href="https://en.wikipedia.org/wiki/Web_feed"
|
||||
aria-label="Wikipedia article on web feeds">web feeds</a>.
|
||||
{{ stats }}
|
||||
</small>
|
||||
</div>
|
||||
<div class="right">
|
||||
<form role="search"
|
||||
action='{{ url_for("search") }}'
|
||||
method="get"
|
||||
class="search"
|
||||
aria-label="Search form">
|
||||
<input type="search" name="q" placeholder="Search" aria-label="Search input" />
|
||||
<button type="submit" aria-label="Search button">Search</button>
|
||||
</form>
|
||||
</div>
|
||||
</div>
|
||||
<nav aria-label="Main navigation">
|
||||
<small>
|
||||
<div class="leftright">
|
||||
<div class="left">
|
||||
<a href='{{ url_for("index") }}' aria-label="Home page">Home</a> |
|
||||
<a href='{{ url_for("feeds") }}' aria-label="Feeds page">Feeds</a> |
|
||||
<a href='{{ url_for("upload_files") }}' aria-label="Upload page">Upload</a> |
|
||||
<a href='{{ url_for("contact") }}' aria-label="Contact page">Contact</a>
|
||||
</div>
|
||||
<div class="right">
|
||||
<a href="https://github.com/TheLovinator1/FeedVault"
|
||||
aria-label="GitHub page">GitHub</a> |
|
||||
<a href="https://github.com/sponsors/TheLovinator1"
|
||||
aria-label="Donate page">Donate</a>
|
||||
</div>
|
||||
</div>
|
||||
</small>
|
||||
</nav>
|
||||
<hr />
|
||||
<main>
|
||||
{% block content %}<!-- default content -->{% endblock %}
|
||||
</main>
|
||||
<hr />
|
||||
<footer>
|
||||
<small>
|
||||
<div class="leftright">
|
||||
<div class="left">Web scraping is not a crime.</div>
|
||||
<div class="right">No rights reserved.</div>
|
||||
</div>
|
||||
<div class="leftright">
|
||||
<div class="left">TheLovinator#9276 on Discord</div>
|
||||
<div class="right">A birthday present for Plipp ❤️</div>
|
||||
</div>
|
||||
</small>
|
||||
</footer>
|
||||
</body>
|
||||
</html>
|
||||
|
|
@ -1,34 +0,0 @@
|
|||
{% extends "base.html" %}
|
||||
{% block content %}
|
||||
<h2>Contact</h2>
|
||||
<p>
|
||||
If you have any questions, suggestions, or feedback, feel free to contact me. I am always happy to help.
|
||||
<br>
|
||||
You can contact me through the following methods:
|
||||
</p>
|
||||
<p>
|
||||
<ul>
|
||||
<li>
|
||||
<a href="https://github.com/TheLovinator1/FeedVault.se/issues">GitHub issues</a>
|
||||
</li>
|
||||
<li>
|
||||
<a href="mailto:tlovinator@gmail.com">tlovinator@gmail.com</a>
|
||||
</li>
|
||||
<li>TheLovinator#9276 on Discord</li>
|
||||
<li>Use the form below to send me a message</li>
|
||||
</ul>
|
||||
</p>
|
||||
<h3>Send a message</h3>
|
||||
<form method="post" action='{{ url_for("contact") }}'>
|
||||
<p>
|
||||
<label for="message">Message</label>
|
||||
<textarea id="message"
|
||||
name="message"
|
||||
rows="4"
|
||||
cols="50"
|
||||
required
|
||||
placeholder="Enter your message"></textarea>
|
||||
</p>
|
||||
<button type="submit">Send message</button>
|
||||
</form>
|
||||
{% endblock content %}
|
||||
|
|
@ -1,15 +0,0 @@
|
|||
{% extends "base.html" %}
{% block content %}
    <h2>{{ feed.url }}</h2>
    <p>{{ feed.description }}</p>
    <h3>Entries</h3>
    {% if entries|length == 0 %}<p>No entries found.</p>{% endif %}
    <ul>
        {% for entry in entries %}
            <li>
                <a href="{{ entry.link }}">{{ entry.title }}</a>
                {# NOTE(review): "| safe" renders remote feed HTML unescaped. Entry summaries
                   come from arbitrary archived feeds, so this is a stored-XSS vector unless
                   the summary is sanitized (e.g. bleach/nh3) before it reaches the template —
                   confirm sanitization happens upstream, or drop "| safe". #}
                <p>{{ entry.summary | safe }}</p>
            </li>
        {% endfor %}
    </ul>
{% endblock content %}
|
||||
|
|
@ -1,14 +0,0 @@
|
|||
{% extends "base.html" %}
{% block content %}
    <h2>Latest Feeds</h2>
    <ul>
        {% for feed in feeds %}
            <li>
                <a href="{{ url_for('feed', feed_url=feed.url) }}">{{ feed.url }}</a>
                <p>{{ feed.description }}</p>
            </li>
        {% endfor %}
    </ul>
    {# Fixed: the pagination cursors are URLs themselves; without percent-encoding any
       "&", "=" or "?" inside them corrupts the query string. Jinja's autoescaping only
       HTML-escapes — it does not URL-encode — so "| urlencode" is required here. #}
    {% if next_url %}<a href='{{ url_for("feeds") }}?next_url={{ next_url | urlencode }}'>Next</a>{% endif %}
    {% if previous_url %}<a href='{{ url_for("feeds") }}?previous_url={{ previous_url | urlencode }}'>Previous</a>{% endif %}
{% endblock content %}
|
||||
|
|
@ -1,36 +0,0 @@
|
|||
{% extends "base.html" %}
{% block content %}
    <h2>Welcome, archivist!</h2>
    <h3>Latest Feeds</h3>
    {# Fixed: the "No feeds found" <p> was inside the <ul>, which is invalid —
       only <li> may be a direct child of <ul>. #}
    {% if not feeds %}<p>No feeds found.</p>{% endif %}
    <ul>
        {% for feed in feeds %}
            <li>
                <a href="{{ url_for('feed', feed_url=feed.url) }}">{{ feed.url }}</a>
                <p>{{ feed.description }}</p>
            </li>
        {% endfor %}
    </ul>
    <h3>FAQ</h3>
    {# Fixed: <summary> is only valid as the first child of a <details> element; the
       bare <summary> tags were invalid HTML. "open" keeps every answer visible by
       default, matching the previous always-expanded rendering. #}
    <details open>
        <summary>What are web feeds?</summary>
        <p>
            Web feeds are a way to distribute content on the web. They allow users to access updates from websites without having to visit them directly. Feeds are typically used for news websites, blogs, and other sites that frequently update content.
            <br>
            You can read more about web feeds on <a href="https://en.wikipedia.org/wiki/Web_feed">Wikipedia</a>.
        </p>
    </details>
    <hr>
    <details open>
        <summary>What is FeedVault?</summary>
        <p>
            FeedVault is a service that archives web feeds. It allows users to access and search for historical content from various websites. The service is designed to preserve the history of the web and provide a reliable source for accessing content that may no longer be available on the original websites.
        </p>
    </details>
    <hr>
    <details open>
        <summary>Why archive feeds?</summary>
        <p>
            Web feeds are a valuable source of information, and archiving them ensures that the content is preserved for future reference. By archiving feeds, we can ensure that historical content is available for research, analysis, and other purposes. Additionally, archiving feeds can help prevent the loss of valuable information due to website changes, outages, or other issues.
        </p>
    </details>
    <hr>
    <details open>
        <summary>How can I access the archived feeds?</summary>
        <p>
            You can access the archived feeds through the website or API. The website provides a user interface for searching and browsing the feeds, while the API allows you to access the feeds programmatically. You can also download the feeds in various formats, such as JSON, XML, or RSS.
        </p>
    </details>
{% endblock content %}
|
||||
|
|
@ -1,21 +0,0 @@
|
|||
{% extends "base.html" %}
{% block content %}
    <h2>
        Searched for:
        "{{ query }}"
    </h2>
    {% if entries %}
        {% for entry in entries %}
            <a href="{{ url_for('feed', feed_url=entry.feed_url) }}">{{ entry.feed_url }} →</a>
            <br>
        {% endfor %}
    {% else %}
        <p>No entries found.</p>
    {% endif %}
    {# Fixed: the search query and pagination cursors are user/feed-derived strings;
       any "&", "=", "#" or space in them would corrupt the query string. Jinja's
       autoescaping HTML-escapes but does not URL-encode, so "| urlencode" is needed. #}
    {% if next_feed and next_entry %}
        <a href='{{ url_for("search") }}?query={{ query | urlencode }}&next_feed={{ next_feed | urlencode }}&next_entry={{ next_entry | urlencode }}'>Next</a>
    {% endif %}
    {% if prev_feed and prev_entry %}
        <a href='{{ url_for("search") }}?query={{ query | urlencode }}&prev_feed={{ prev_feed | urlencode }}&prev_entry={{ prev_entry | urlencode }}'>Previous</a>
    {% endif %}
{% endblock content %}
|
||||
|
|
@ -1,47 +0,0 @@
|
|||
{% extends "base.html" %}
{% block content %}
    <h2>Upload</h2>
    <p>
        You can upload files to the archive here.
        <br>
        Things you can upload include:
    </p>
    {# Fixed: the <ul> was nested inside a <p>, which is invalid HTML. #}
    <ul>
        <li>Lists of URLs</li>
        <li>Databases</li>
        <li>Backups</li>
        <li>Archives</li>
        <li>Anything else that is relevant</li>
    </ul>
    <p>
        {# Fixed: stray doubled quote after the href attribute ( }}''> ) produced a
           malformed <a> tag. #}
        Uploaded files will be manually reviewed and indexed. If you have any questions, feel free to <a href='{{ url_for("contact") }}'>contact me</a>.
        <br>
        You can also provide a description for the file you are uploading.
    </p>
    <form enctype="multipart/form-data"
          method="post"
          action='{{ url_for("upload_files") }}'>
        <p>
            <input type="file" name="files" required multiple>
            <br>
            <br>
            <label for="description">Description (optional)</label>
            <textarea id="description"
                      name="description"
                      rows="4"
                      cols="50"
                      placeholder="Enter a description for the file"></textarea>
        </p>
        <input type="checkbox" id="public" name="public">
        <label for="public">Public</label>
        <br>
        <br>
        <small>
            Public files will be listed on FeedVault for everyone to see and download.
            <br>
            Private files will only be used for indexing feeds.
        </small>
        <br>
        <br>
        <button type="submit">Upload file</button>
    </form>
{% endblock content %}
|
||||
|
|
@ -1,23 +0,0 @@
|
|||
from typing import TYPE_CHECKING
|
||||
|
||||
from fastapi.testclient import TestClient
|
||||
|
||||
from app.main import app
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from httpx import Response
|
||||
|
||||
client = TestClient(app)
|
||||
|
||||
|
||||
def test_read_main() -> None:
    """Verify the index page renders successfully and links to the Web feed article."""
    # Fetch the landing page through the in-process test client.
    index_response: Response = client.get("/")

    # The page must be served without error.
    assert index_response.status_code == 200

    # The FAQ links out to Wikipedia's Web feed article; its anchor tag must be present.
    expected_anchor = '<a href="https://en.wikipedia.org/wiki/Web_feed">web feeds</a>.'
    assert expected_anchor in index_response.text
|
||||
Loading…
Add table
Add a link
Reference in a new issue