Lemon sadness

This commit is contained in:
Joakim Hellsén 2024-05-20 04:34:51 +02:00
commit bfe90aa69d
No known key found for this signature in database
GPG key ID: D196AE66FEBE1DC9
52 changed files with 1564 additions and 2492 deletions

View file

@ -1,6 +1,11 @@
DEBUG=True
SECRET_KEY=
EMAIL_HOST_USER=
EMAIL_HOST_PASSWORD=
DISCORD_WEBHOOK_URL=
DATABASE_PATH=/data
DB_NAME=feedvault
DB_USER=feedvault
DB_PASSWORD=
DB_HOST=192.168.1.2
DB_PORT=5433
REDIS_PASSWORD=
REDIS_PORT=6380
REDIS_HOST=192.168.1.2

10
.vscode/launch.json vendored
View file

@ -11,6 +11,14 @@
"program": "${workspaceFolder}\\manage.py",
"args": ["runserver"],
"django": true
}
},
{
"name": "Python manage.py command",
"type": "debugpy",
"request": "launch",
"program": "${workspaceFolder}\\manage.py",
"args": ["update_feeds"],
"justMyCode": false
},
]
}

View file

@ -35,6 +35,7 @@
"gofeed",
"gomod",
"gorm",
"Hiredis",
"hitronhub",
"homerouter",
"hotspot",
@ -54,6 +55,7 @@
"lscr",
"makemigrations",
"malformedurl",
"memlock",
"meowning",
"mmcdole",
"Monero",
@ -98,6 +100,7 @@
"tplinkplclogin",
"tplinkrepeater",
"tplinkwifi",
"ulimits",
"Veni",
"vidi",
"webmail",

View file

@ -1,54 +1,46 @@
version: "3"
services:
# Django - Web framework
feedvault:
container_name: feedvault
image: ghcr.io/thelovinator1/feedvault:latest
# feedvault:
# container_name: feedvault
# image: ghcr.io/thelovinator1/feedvault:latest
# user: "1000:1000"
# restart: always
# environment:
# - DEBUG=False
# - SECRET_KEY=${SECRET_KEY}
# - DB_NAME=feedvault
# - DB_USER=feedvault
# - DB_PASSWORD=${DB_PASSWORD}
# - DB_HOST=feedvault_postgres
# - DB_PORT=5432
# - REDIS_PASSWORD=${REDIS_PASSWORD}
# - REDIS_PORT=6379
# - REDIS_HOST=garnet
# volumes:
# - /Docker/FeedVault/FeedVault/staticfiles:/app/staticfiles
# - /mnt/Fourteen/Docker/FeedVault/media:/app/media
feedvault_postgres:
container_name: feedvault_postgres
image: postgres:16
# user: "1000:1000"
restart: always
ports:
- "5432:5432"
environment:
POSTGRES_USER: feedvault
POSTGRES_PASSWORD: ${DB_PASSWORD}
POSTGRES_DB: feedvault
volumes:
- /Docker/FeedVault/PostgreSQL:/var/lib/postgresql/data
garnet:
container_name: garnet
image: "ghcr.io/microsoft/garnet"
user: "1000:1000"
restart: always
networks:
- feedvault_web
environment:
- SECRET_KEY=${SECRET_KEY}
- DEBUG=${DEBUG}
- EMAIL_HOST_USER=${EMAIL_HOST_USER}
- EMAIL_HOST_PASSWORD=${EMAIL_HOST_PASSWORD}
- DISCORD_WEBHOOK_URL=${DISCORD_WEBHOOK_URL}
ulimits:
memlock: -1
command: ["--auth", "Password", "--password", "${REDIS_PASSWORD}"]
ports:
- "6379:6379"
volumes:
- /mnt/Fourteen/Docker/FeedVault/staticfiles:/app/staticfiles
- /mnt/Fourteen/Docker/FeedVault/media:/app/media
- /mnt/Fourteen/Docker/FeedVault/data:/app/data
# Nginx - Reverse proxy
web:
container_name: feedvault_web
image: lscr.io/linuxserver/nginx:latest
restart: always
environment:
- PUID=1000
- PGID=1000
- TZ=Europe/Stockholm
expose:
- 80
- 443
volumes:
- /mnt/Fourteen/Docker/FeedVault/Nginx:/config
networks:
- feedvault_web
- feedvault_tunnel
# Cloudflare Tunnel - Securely connect your server to Cloudflare
tunnel:
container_name: feedvault_tunnel
image: cloudflare/cloudflared:latest
command: tunnel --no-autoupdate run --token $TUNNEL_TOKEN
restart: always
networks:
- feedvault_tunnel
environment:
- TUNNEL_URL=http://feedvault_web:80
networks:
feedvault_tunnel:
driver: bridge
feedvault_web:
driver: bridge
- /Docker/FeedVault/Garnet:/data

1
feeds/admin.py Normal file
View file

@ -0,0 +1 @@
# Register your models here.

6
feeds/apps.py Normal file
View file

@ -0,0 +1,6 @@
from django.apps import AppConfig
class FeedsConfig(AppConfig):
    """Django application configuration for the feeds app."""

    # Use 64-bit auto-incrementing primary keys for all models in this app.
    default_auto_field: str = "django.db.models.BigAutoField"
    name = "feeds"

224
feeds/get_reader.py Normal file
View file

@ -0,0 +1,224 @@
from __future__ import annotations
import logging
from functools import lru_cache
from typing import TYPE_CHECKING, Any, Iterable, Iterator, Self
from django.db.models import Q
from reader import ExceptionInfo, FeedExistsError, FeedNotFoundError, Reader, make_reader
from reader._types import (
EntryForUpdate, # noqa: PLC2701
EntryUpdateIntent,
FeedData,
FeedFilter,
FeedForUpdate, # noqa: PLC2701
FeedUpdateIntent,
SearchType, # noqa: PLC2701
StorageType, # noqa: PLC2701
)
from .models import Entry, Feed
if TYPE_CHECKING:
import datetime
from django.db.models.manager import BaseManager
logger = logging.getLogger(__name__)
class EmptySearch(SearchType): ...
class EntriesForUpdateIterator:
    """Lazily map ``(feed_url, entry_id)`` pairs to ``EntryForUpdate`` records.

    Yields ``None`` for pairs with no matching ``Entry`` row, which is how
    reader's update logic is told an entry is new.
    """

    def __init__(self, entries: Iterable[tuple[str, str]]) -> None:
        """Store an iterator over the requested (feed URL, entry id) pairs."""
        self.entries: Iterator[tuple[str, str]] = iter(entries)

    def __iter__(self) -> Self:
        return self

    def __next__(self) -> EntryForUpdate | None:
        # Bug fixes vs. the original: the return annotation claimed
        # EntryForUpdate even though None is returned for unknown entries, a
        # try/except only re-raised StopIteration (redundant; let it
        # propagate), and a leftover debug print() ran for every entry.
        feed_url, entry_id = next(self.entries)
        entry_data: dict[str, Any] | None = (
            Entry.objects.filter(Q(feed__url=feed_url) & Q(id=entry_id))
            .values("updated", "published", "data_hash", "data_hash_changed")
            .first()
        )
        if not entry_data:
            # Entry not stored yet.
            return None
        return EntryForUpdate(
            updated=entry_data.get("updated"),
            published=entry_data.get("published"),
            hash=entry_data.get("data_hash"),
            hash_changed=entry_data.get("data_hash_changed"),
        )
class DjangoStorage(StorageType):
    """Storage backend for *reader* built on the Django ORM.

    Only the subset of :class:`StorageType` needed by the update logic is
    implemented so far.
    """

    # TODO(TheLovinator): Implement all methods from StorageType.
    default_search_cls = EmptySearch

    def __enter__(self: DjangoStorage) -> None:
        """Called when Reader is used as a context manager."""
        # TODO(TheLovinator): Should we check if we have migrations to apply?

    def __exit__(self: DjangoStorage, *_: object) -> None:
        """Called when Reader is used as a context manager."""
        # TODO(TheLovinator): Should we close the connection?

    def close(self: DjangoStorage) -> None:
        """Called by Reader.close()."""
        # TODO(TheLovinator): Should we close the connection?

    def add_feed(self, url: str, /, added: datetime.datetime) -> None:
        """Called by Reader.add_feed().

        Args:
            url: The URL of the feed.
            added: The time the feed was added.

        Raises:
            FeedExistsError: Feed already exists. Bases: FeedError
        """
        # NOTE(review): check-then-create is racy under concurrent adds; the
        # unique constraint on url is the real guard — confirm if that matters.
        if Feed.objects.filter(url=url).exists():
            msg: str = f"Feed already exists: {url}"
            raise FeedExistsError(msg)
        Feed(url=url, added=added).save()

    def get_feeds_for_update(self, filter: FeedFilter):  # noqa: A002
        """Called by update logic.

        Args:
            filter: The filter to apply.

        Returns:
            A lazy iterable of FeedForUpdate.
        """
        # Lazy %-style logging instead of an eager f-string.
        logger.debug("get_feeds_for_update: filter=%r", filter)
        # TODO(TheLovinator): Don't get all values, apply ``filter``.
        feeds: BaseManager[Feed] = Feed.objects.all()
        for feed in feeds:
            yield FeedForUpdate(
                url=feed.url,
                updated=feed.updated,
                http_etag=feed.http_etag,
                http_last_modified=feed.http_last_modified,
                stale=feed.stale,
                last_updated=feed.last_updated,
                last_exception=bool(feed.last_exception_type_name),
                hash=feed.data_hash,
            )

    def update_feed(self, intent: FeedUpdateIntent, /) -> None:
        """Called by update logic.

        Args:
            intent: Data to be passed to Storage when updating a feed.

        Raises:
            FeedNotFoundError: If no feed with ``intent.url`` exists.
        """
        # Bug fix: .get() raises DoesNotExist rather than returning None, so
        # the original ``if feed is None`` branch was dead. Use .first() so we
        # can raise reader's FeedNotFoundError as documented.
        feed: Feed | None = Feed.objects.filter(url=intent.url).first()
        if feed is None:
            msg: str = f"Feed not found: {intent.url}"
            raise FeedNotFoundError(msg)
        feed.last_updated = intent.last_updated
        feed.http_etag = intent.http_etag
        feed.http_last_modified = intent.http_last_modified
        feed_data: FeedData | None = intent.feed
        if feed_data is not None:
            feed.title = feed_data.title
            feed.link = feed_data.link
            feed.author = feed_data.author
            feed.subtitle = feed_data.subtitle
            feed.version = feed_data.version
        if intent.last_exception is not None:
            last_exception: ExceptionInfo = intent.last_exception
            feed.last_exception_type_name = last_exception.type_name
            feed.last_exception_value = last_exception.value_str
            feed.last_exception_traceback = last_exception.traceback_str
        feed.save()

    def set_feed_stale(self, url: str, stale: bool, /) -> None:  # noqa: FBT001
        """Used by update logic tests.

        Args:
            url: The URL of the feed.
            stale: Whether the next update should update all entries, regardless of their hash or updated.

        Raises:
            FeedNotFoundError: If no feed with ``url`` exists.
        """
        # Same dead-None-check fix as in update_feed().
        feed: Feed | None = Feed.objects.filter(url=url).first()
        if feed is None:
            msg: str = f"Feed not found: {url}"
            raise FeedNotFoundError(msg)
        feed.stale = stale
        feed.save()

    def get_entries_for_update(self, entries: Iterable[tuple[str, str]], /) -> EntriesForUpdateIterator:
        """Called by update logic.

        Args:
            entries: (feed URL, entry id) pairs identifying the entries.

        Returns:
            A lazy iterable of EntryForUpdate (None for unknown entries).
        """
        # Bug fix: ``entries`` may be a one-shot iterator; the original
        # iterated it for logging and again via list(), exhausting it before
        # handing it to the iterator. Materialize exactly once.
        pairs: list[tuple[str, str]] = list(entries)
        logger.debug("get_entries_for_update: %d entries requested", len(pairs))
        return EntriesForUpdateIterator(pairs)

    def add_or_update_entries(self, intents: Iterable[EntryUpdateIntent], /) -> None:
        """Called by update logic.

        Args:
            intents: Data to be passed to Storage when updating a feed.

        Raises:
            NotImplementedError: Always; persisting entries is not implemented yet.
        """
        # TODO(TheLovinator): Implement via Entry.objects.get_or_create() /
        # Entry.objects.bulk_create() over intent.entry.resource_id, and raise
        # FeedNotFoundError if the feed does not exist.
        # (The original had an unreachable loop after this raise.)
        msg = "Not implemented yet."
        raise NotImplementedError(msg)

    def make_search(self) -> SearchType:
        """Called by Reader.make_search().

        Returns:
            A Search instance.
        """
        return EmptySearch()
@lru_cache(maxsize=1)
def get_reader() -> Reader:
    """Create a Reader instance backed by DjangoStorage.

    Cached with ``lru_cache(maxsize=1)`` so every caller shares one instance.

    Example:
        reader = get_reader()
        reader.add_feed("https://example.com/feed", added=datetime.datetime.now())
        reader.update_feeds()

    Returns:
        A Reader instance (search disabled).
    """
    return make_reader(
        "",
        _storage=DjangoStorage(),
        search_enabled=False,
    )

View file

View file

@ -0,0 +1,16 @@
from typing import TYPE_CHECKING
from django.core.management.base import BaseCommand
if TYPE_CHECKING:
from reader import Reader
class Command(BaseCommand):
    """Management command (``manage.py update_feeds``) that updates all feeds via reader."""

    help = "Update feeds"

    def handle(self, *args, **options) -> None:
        """Entry point: build the cached Reader and run one update pass."""
        # Imported lazily so Django app loading has finished before the
        # reader/storage machinery (which touches the ORM) is constructed.
        from feeds.get_reader import get_reader  # noqa: PLC0415

        reader: Reader = get_reader()
        reader.update_feeds()

View file

@ -0,0 +1,110 @@
# Generated by Django 5.0.6 on 2024-05-20 00:49
import django.db.models.deletion
import feeds.models
from django.db import migrations, models
class Migration(migrations.Migration):
    """Initial schema for the feeds app: Entry, Feed, UploadedFeed, Enclosure, Content."""

    initial = True

    dependencies = [
    ]

    operations = [
        # Entry mirrors reader's entry attributes; its FK to Feed is added by
        # the AddField operation at the end of this list.
        migrations.CreateModel(
            name='Entry',
            fields=[
                ('id', models.TextField(help_text='The entry id.', primary_key=True, serialize=False)),
                ('updated', models.DateTimeField(help_text='The date the entry was last updated, according to the feed.', null=True)),
                ('title', models.TextField(help_text='The title of the entry.', null=True)),
                ('link', models.TextField(help_text='The URL of the entry.', null=True)),
                ('author', models.TextField(help_text='The author of the feed.', null=True)),
                ('published', models.DateTimeField(help_text='The date the entry was published.', null=True)),
                ('summary', models.TextField(help_text='A summary of the entry.', null=True)),
                ('read', models.BooleanField(default=False, help_text='Whether the entry has been read.')),
                ('read_modified', models.DateTimeField(help_text='When read was last modified, None if that never.', null=True)),
                ('added', models.DateTimeField(help_text='The date when the entry was added (first updated) to reader.', null=True)),
                ('added_by', models.TextField(help_text="The source of the entry. One of 'feed', 'user'.", null=True)),
                ('last_updated', models.DateTimeField(help_text='The date when the entry was last retrieved by reader.', null=True)),
                ('first_updated', models.DateTimeField(help_text='The date when the entry was first retrieved by reader.', null=True)),
                ('first_updated_epoch', models.DateTimeField(help_text='The date when the entry was first retrieved by reader, as an epoch timestamp.', null=True)),
                ('feed_order', models.PositiveIntegerField(help_text='The order of the entry in the feed.', null=True)),
                ('recent_sort', models.PositiveIntegerField(help_text='The order of the entry in the recent list.', null=True)),
                ('sequence', models.BinaryField(help_text='The sequence of the entry in the feed.', null=True)),
                ('original_feed', models.TextField(help_text='The URL of the original feed of the entry. If the feed URL never changed, the same as feed_url.', null=True)),
                ('data_hash', models.TextField(help_text='The hash of the entry data.', null=True)),
                ('data_hash_changed', models.BooleanField(default=False, help_text='Whether the data hash has changed since the last update.')),
                ('important', models.BooleanField(default=False, help_text='Whether the entry is important.')),
                ('important_modified', models.DateTimeField(help_text='When important was last modified, None if that never.', null=True)),
            ],
        ),
        # Feed mirrors reader's feed attributes plus update bookkeeping.
        migrations.CreateModel(
            name='Feed',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('url', models.URLField(help_text='The URL of the feed.', unique=True)),
                ('updated', models.DateTimeField(help_text='The date the feed was last updated, according to the feed.', null=True)),
                ('title', models.TextField(help_text='The title of the feed.', null=True)),
                ('link', models.TextField(help_text='The URL of a page associated with the feed.', null=True)),
                ('author', models.TextField(help_text='The author of the feed.', null=True)),
                ('subtitle', models.TextField(help_text='A description or subtitle for the feed.', null=True)),
                ('version', models.TextField(help_text='The version of the feed.', null=True)),
                ('user_title', models.TextField(help_text='User-defined feed title.', null=True)),
                ('added', models.DateTimeField(auto_now_add=True, help_text='The date when the feed was added.')),
                ('last_updated', models.DateTimeField(help_text='The date when the feed was last retrieved by reader.', null=True)),
                ('last_exception_type_name', models.TextField(help_text='The fully qualified name of the exception type.', null=True)),
                ('last_exception_value', models.TextField(help_text='The exception value.', null=True)),
                ('last_exception_traceback', models.TextField(help_text='The exception traceback.', null=True)),
                ('updates_enabled', models.BooleanField(default=True, help_text='Whether updates are enabled for the feed.')),
                ('stale', models.BooleanField(default=False, help_text='Whether the next update should update all entries, regardless of their hash or updated.')),
                ('http_etag', models.TextField(help_text='The HTTP ETag header.', null=True)),
                ('http_last_modified', models.TextField(help_text='The HTTP Last-Modified header.', null=True)),
                ('data_hash', models.TextField(help_text='The hash of the feed data.', null=True)),
            ],
        ),
        # User-uploaded OPML/feed files awaiting processing.
        migrations.CreateModel(
            name='UploadedFeed',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('file', models.FileField(help_text='The file that was uploaded.', upload_to=feeds.models.get_upload_path)),
                ('original_filename', models.TextField(help_text='The original filename of the file.')),
                ('created_at', models.DateTimeField(auto_now_add=True, help_text='The time the file was uploaded.')),
                ('has_been_processed', models.BooleanField(default=False, help_text='Has the file content been added to the archive?')),
                ('public', models.BooleanField(default=False, help_text='Is the file public?')),
                ('description', models.TextField(blank=True, help_text='Description added by user.')),
                ('notes', models.TextField(blank=True, help_text='Notes from admin.')),
            ],
            options={
                'verbose_name': 'Uploaded file',
                'verbose_name_plural': 'Uploaded files',
                'ordering': ['-created_at'],
            },
        ),
        migrations.CreateModel(
            name='Enclosure',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('href', models.TextField(help_text='The file URL.')),
                ('type', models.TextField(help_text='The file content type.', null=True)),
                ('length', models.PositiveIntegerField(help_text='The file length.', null=True)),
                ('entry', models.ForeignKey(help_text='The entry this enclosure is for.', on_delete=django.db.models.deletion.CASCADE, related_name='enclosures', to='feeds.entry')),
            ],
        ),
        migrations.CreateModel(
            name='Content',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('value', models.TextField(help_text='The content value.')),
                ('type', models.TextField(help_text='The content type.', null=True)),
                ('language', models.TextField(help_text='The content language.', null=True)),
                ('entry', models.ForeignKey(help_text='The entry this content is for.', on_delete=django.db.models.deletion.CASCADE, related_name='content', to='feeds.entry')),
            ],
        ),
        # Added after both models exist to break the Entry/Feed creation cycle.
        migrations.AddField(
            model_name='entry',
            name='feed',
            field=models.ForeignKey(help_text='The feed this entry is from.', on_delete=django.db.models.deletion.CASCADE, related_name='entries', to='feeds.feed'),
        ),
    ]

View file

@ -1,4 +1,4 @@
# Generated by Django 5.0.3 on 2024-03-15 13:21
# Generated by Django 5.0.6 on 2024-05-20 01:19
from django.db import migrations, models
@ -6,13 +6,13 @@ from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('feedvault', '0001_initial'),
('feeds', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='feed',
name='status',
field=models.IntegerField(null=True),
name='data_hash',
field=models.BinaryField(help_text='The hash of the feed data.', null=True),
),
]

View file

131
feeds/models.py Normal file
View file

@ -0,0 +1,131 @@
"""These models are used to store the data from https://reader.readthedocs.io/en/latest/api.html#reader.Feed."""
from __future__ import annotations
import typing
import uuid
from pathlib import Path
from django.db import models
def get_upload_path(instance: UploadedFeed, filename: str) -> str:
    """Build an anonymized storage path for an uploaded file.

    The original filename is discarded (only its extension is kept) so that
    user-supplied names cannot collide or leak information.

    Args:
        instance: The UploadedFeed the file belongs to; its ``created_at``
            timestamp is embedded in the generated name.
        filename: The original filename of the upload.

    Returns:
        A path like ``uploads/<unix-time>-<uuid-hex><ext>``.
    """
    ext: str = Path(filename).suffix
    unix_time: int = int(instance.created_at.timestamp())
    # Bug fix: the generated name was computed but never used — the function
    # returned a literal (broken f-string with no placeholder), so every
    # upload mapped to the same path.
    new_filename: str = f"{unix_time}-{uuid.uuid4().hex}{ext}"
    return f"uploads/{new_filename}"
class UploadedFeed(models.Model):
    """A file uploaded to the server by a user."""

    # Stored under an anonymized name; see get_upload_path above.
    file = models.FileField(upload_to=get_upload_path, help_text="The file that was uploaded.")
    original_filename = models.TextField(help_text="The original filename of the file.")
    created_at = models.DateTimeField(auto_now_add=True, help_text="The time the file was uploaded.")
    has_been_processed = models.BooleanField(default=False, help_text="Has the file content been added to the archive?")
    public = models.BooleanField(default=False, help_text="Is the file public?")
    description = models.TextField(blank=True, help_text="Description added by user.")
    notes = models.TextField(blank=True, help_text="Notes from admin.")

    class Meta:
        """Meta information for the uploaded file model."""

        # Newest uploads first.
        ordering: typing.ClassVar[list[str]] = ["-created_at"]
        verbose_name: str = "Uploaded file"
        verbose_name_plural: str = "Uploaded files"

    def __str__(self: UploadedFeed) -> str:
        """Original filename plus upload time."""
        return f"{self.original_filename} - {self.created_at}"
class Feed(models.Model):
    """An archived web feed, mirroring reader's Feed attributes (see module docstring)."""

    url = models.URLField(unique=True, help_text="The URL of the feed.")
    updated = models.DateTimeField(help_text="The date the feed was last updated, according to the feed.", null=True)
    title = models.TextField(help_text="The title of the feed.", null=True)
    link = models.TextField(help_text="The URL of a page associated with the feed.", null=True)
    author = models.TextField(help_text="The author of the feed.", null=True)
    subtitle = models.TextField(help_text="A description or subtitle for the feed.", null=True)
    version = models.TextField(help_text="The version of the feed.", null=True)
    user_title = models.TextField(help_text="User-defined feed title.", null=True)
    added = models.DateTimeField(help_text="The date when the feed was added.", auto_now_add=True)
    last_updated = models.DateTimeField(help_text="The date when the feed was last retrieved by reader.", null=True)
    # Last-exception triple; last_exception_type_name doubles as the
    # "has an exception" flag in DjangoStorage.get_feeds_for_update.
    last_exception_type_name = models.TextField(help_text="The fully qualified name of the exception type.", null=True)
    last_exception_value = models.TextField(help_text="The exception value.", null=True)
    last_exception_traceback = models.TextField(help_text="The exception traceback.", null=True)
    updates_enabled = models.BooleanField(help_text="Whether updates are enabled for the feed.", default=True)
    stale = models.BooleanField(
        help_text="Whether the next update should update all entries, regardless of their hash or updated.",
        default=False,
    )
    http_etag = models.TextField(help_text="The HTTP ETag header.", null=True)
    http_last_modified = models.TextField(help_text="The HTTP Last-Modified header.", null=True)
    data_hash = models.BinaryField(help_text="The hash of the feed data.", null=True)

    def __str__(self) -> str:
        """Title plus URL when a title exists, otherwise just the URL."""
        return f"{self.title} ({self.url})" if self.title else self.url
class Entry(models.Model):
    """A single entry belonging to a Feed, mirroring reader's Entry attributes."""

    feed = models.ForeignKey(
        Feed, on_delete=models.CASCADE, help_text="The feed this entry is from.", related_name="entries"
    )
    # The feed-supplied entry id is the primary key (shadows builtin ``id``
    # deliberately — this matches reader's entry identity).
    id = models.TextField(primary_key=True, help_text="The entry id.")
    updated = models.DateTimeField(help_text="The date the entry was last updated, according to the feed.", null=True)
    title = models.TextField(help_text="The title of the entry.", null=True)
    link = models.TextField(help_text="The URL of the entry.", null=True)
    author = models.TextField(help_text="The author of the feed.", null=True)
    published = models.DateTimeField(help_text="The date the entry was published.", null=True)
    summary = models.TextField(help_text="A summary of the entry.", null=True)
    read = models.BooleanField(help_text="Whether the entry has been read.", default=False)
    read_modified = models.DateTimeField(help_text="When read was last modified, None if that never.", null=True)
    added = models.DateTimeField(help_text="The date when the entry was added (first updated) to reader.", null=True)
    added_by = models.TextField(help_text="The source of the entry. One of 'feed', 'user'.", null=True)
    last_updated = models.DateTimeField(help_text="The date when the entry was last retrieved by reader.", null=True)
    first_updated = models.DateTimeField(help_text="The date when the entry was first retrieved by reader.", null=True)
    first_updated_epoch = models.DateTimeField(
        help_text="The date when the entry was first retrieved by reader, as an epoch timestamp.", null=True
    )
    feed_order = models.PositiveIntegerField(help_text="The order of the entry in the feed.", null=True)
    recent_sort = models.PositiveIntegerField(help_text="The order of the entry in the recent list.", null=True)
    sequence = models.BinaryField(help_text="The sequence of the entry in the feed.", null=True)
    original_feed = models.TextField(
        help_text="The URL of the original feed of the entry. If the feed URL never changed, the same as feed_url.",
        null=True,
    )
    data_hash = models.TextField(help_text="The hash of the entry data.", null=True)
    data_hash_changed = models.BooleanField(
        help_text="Whether the data hash has changed since the last update.", default=False
    )
    important = models.BooleanField(help_text="Whether the entry is important.", default=False)
    important_modified = models.DateTimeField(
        help_text="When important was last modified, None if that never.", null=True
    )

    def __str__(self) -> str:
        """Title plus link when both exist, otherwise the raw entry id."""
        return f"{self.title} ({self.link})" if self.title and self.link else self.id
class Content(models.Model):
    """A piece of content attached to an Entry (e.g. one HTML or text body)."""

    entry = models.ForeignKey(
        Entry, on_delete=models.CASCADE, help_text="The entry this content is for.", related_name="content"
    )
    value = models.TextField(help_text="The content value.")
    type = models.TextField(help_text="The content type.", null=True)
    language = models.TextField(help_text="The content language.", null=True)

    def __str__(self) -> str:
        """Return a short (50-character) preview of the content value."""
        preview_limit = 50
        if len(self.value) > preview_limit:
            return self.value[:preview_limit] + "..."
        return self.value
class Enclosure(models.Model):
    """An attached media file (podcast audio, image, ...) referenced by an Entry."""

    entry = models.ForeignKey(
        Entry, on_delete=models.CASCADE, help_text="The entry this enclosure is for.", related_name="enclosures"
    )
    href = models.TextField(help_text="The file URL.")
    type = models.TextField(help_text="The file content type.", null=True)
    length = models.PositiveIntegerField(help_text="The file length.", null=True)

    def __str__(self) -> str:
        """The enclosure's URL."""
        return self.href

1
feeds/tests.py Normal file
View file

@ -0,0 +1 @@
# Create your tests here.

26
feeds/urls.py Normal file
View file

@ -0,0 +1,26 @@
from __future__ import annotations
from django.contrib.sitemaps import GenericSitemap
from django.urls import include, path
from feedvault.sitemaps import StaticViewSitemap
from .models import Feed
from .views import AddView, FeedsView, FeedView, IndexView, SearchView, UploadView
app_name: str = "feeds"

sitemaps = {
    "static": StaticViewSitemap,
    # Bug fix: Feed has no ``created_at`` field — its creation timestamp is
    # ``added`` (see feeds.models.Feed) — so "created_at" would raise when
    # the sitemap computed lastmod.
    "feeds": GenericSitemap({"queryset": Feed.objects.all(), "date_field": "added"}),
}

# URL routes for the feeds app (namespaced as "feeds:<name>").
urlpatterns: list = [
    path(route="", view=IndexView.as_view(), name="index"),
    path("__debug__/", include("debug_toolbar.urls")),
    path(route="feed/<int:feed_id>/", view=FeedView.as_view(), name="feed"),
    path(route="feeds/", view=FeedsView.as_view(), name="feeds"),
    path(route="add/", view=AddView.as_view(), name="add"),
    path(route="upload/", view=UploadView.as_view(), name="upload"),
    path(route="search/", view=SearchView.as_view(), name="search"),
]

175
feeds/views.py Normal file
View file

@ -0,0 +1,175 @@
from __future__ import annotations
import logging
from typing import TYPE_CHECKING, Any
from django.contrib import messages
from django.core.paginator import EmptyPage, Page, Paginator
from django.db.models.manager import BaseManager
from django.http import HttpRequest, HttpResponse
from django.shortcuts import get_object_or_404, redirect, render
from django.template import loader
from django.views import View
from reader import InvalidFeedURLError
from feeds.get_reader import get_reader
from feeds.models import Entry, Feed, UploadedFeed
if TYPE_CHECKING:
from django.core.files.uploadedfile import UploadedFile
from django.db.models.manager import BaseManager
from reader import Reader
logger: logging.Logger = logging.getLogger(__name__)
class HtmxHttpRequest(HttpRequest):
    """HttpRequest subtype used only for typing the ``htmx`` attribute."""

    # NOTE(review): presumably set by django-htmx middleware — confirm.
    htmx: Any
class IndexView(View):
    """Index path."""

    def get(self, request: HttpRequest) -> HttpResponse:
        """Render the landing page with its static SEO metadata."""
        page_context: dict[str, str] = {
            "description": "FeedVault allows users to archive and search their favorite web feeds.",
            "keywords": "feed, rss, atom, archive, rss list",
            "author": "TheLovinator",
            "canonical": "https://feedvault.se/",
            "title": "FeedVault",
        }
        template = loader.get_template(template_name="index.html")
        rendered = template.render(context=page_context, request=request)
        return HttpResponse(content=rendered)
class FeedView(View):
    """A single feed."""

    def get(self, request: HttpRequest, *args, **kwargs) -> HttpResponse:  # noqa: ANN002, ANN003
        """Load the feed page.

        Args:
            request: The incoming request.
            **kwargs: Contains ``feed_id`` from the URL route.

        Returns:
            The rendered feed page, 400 if no id was given, 404 if unknown.
        """
        feed_id: int | None = kwargs.get("feed_id", None)
        if not feed_id:
            return HttpResponse(content="No id", status=400)
        feed: Feed = get_object_or_404(Feed, pk=feed_id)
        entries: BaseManager[Entry] = Entry.objects.filter(feed=feed).order_by("-added")[:100]
        # Bug fix: the original used ``f"{feed.subtitle}" or fallback`` — an
        # f-string is always truthy (None renders as "None"), so the
        # fallbacks for description/author/title were dead code.
        context: dict[str, Any] = {
            "feed": feed,
            "entries": entries,
            "description": feed.subtitle or f"Archive of {feed.url}",
            "keywords": "feed, rss, atom, archive, rss list",
            "author": feed.author or "FeedVault",
            "canonical": f"https://feedvault.se/feed/{feed.pk}/",
            "title": feed.title or "FeedVault",
        }
        return render(request=request, template_name="feed.html", context=context)
class FeedsView(View):
    """All feeds."""

    def get(self, request: HtmxHttpRequest) -> HttpResponse:
        """All feeds, paginated 100 per page.

        Returns an empty body for out-of-range pages (so htmx infinite
        scrolling stops cleanly) and falls back to page 1 on malformed input.
        """
        feeds: BaseManager[Feed] = Feed.objects.only("id", "url")
        paginator = Paginator(object_list=feeds, per_page=100)
        # Bug fix: int() on a malformed ?page= raised an unhandled ValueError.
        try:
            page_number = int(request.GET.get("page", default=1))
        except ValueError:
            page_number = 1
        # Bug fix: Paginator.get_page() never raises EmptyPage (it clamps),
        # so the original except branch was dead; use .page() so an
        # out-of-range page really yields the intended empty response.
        try:
            pages: Page = paginator.page(page_number)
        except EmptyPage:
            return HttpResponse("")
        context: dict[str, str | Page | int] = {
            "feeds": pages,
            "description": "An archive of web feeds",
            "keywords": "feed, rss, atom, archive, rss list",
            "author": "TheLovinator",
            "canonical": "https://feedvault.se/feeds/",
            "title": "Feeds",
            "page": page_number,
        }
        # htmx requests get only the list partial for infinite scroll.
        template_name = "partials/feeds.html" if request.htmx else "feeds.html"
        return render(request, template_name, context)
class AddView(View):
    """Add a feed."""

    def get(self, request: HttpRequest) -> HttpResponse:
        """Load the index page."""
        template = loader.get_template(template_name="index.html")
        context: dict[str, str] = {
            "description": "FeedVault allows users to archive and search their favorite web feeds.",
            "keywords": "feed, rss, atom, archive, rss list",
            "author": "TheLovinator",
            "canonical": "https://feedvault.se/",
        }
        return HttpResponse(content=template.render(context=context, request=request))

    def post(self, request: HttpRequest) -> HttpResponse:
        """Add one feed per non-empty line of the ``urls`` form field."""
        urls: str | None = request.POST.get("urls", None)
        if not urls:
            return HttpResponse(content="No urls", status=400)
        reader: Reader = get_reader()
        # splitlines() also copes with \r\n line endings from browsers.
        for url in urls.splitlines():
            clean_url: str = url.strip()
            if not clean_url:
                # Bug fix: blank lines used to be passed to add_feed() as "".
                continue
            try:
                reader.add_feed(clean_url)
                messages.success(request, f"Added {clean_url}")
            except InvalidFeedURLError:
                logger.exception("Error adding %s", clean_url)
                messages.error(request, f"Error adding {clean_url}")
        messages.success(request, "Feeds added")
        return redirect("feeds:index")
class UploadView(View):
    """Upload a file."""

    def post(self, request: HttpRequest) -> HttpResponse:
        """Store an uploaded file as an UploadedFeed record."""
        file: UploadedFile | None = request.FILES.get("file", None)
        if not file:
            return HttpResponse(content="No file", status=400)
        # Bug fix: UploadedFeed has no ``user`` field (see feeds.models), so
        # passing user=request.user raised TypeError on every upload. If the
        # uploader should be recorded, add a user FK to the model instead.
        UploadedFeed.objects.create(file=file, original_filename=file.name)
        messages.success(request, f"{file.name} uploaded")
        messages.info(request, "If the file was marked as public, it will be shown on the feeds page. ")
        return redirect("feeds:index")
class SearchView(View):
    """Search view."""

    def get(self, request: HtmxHttpRequest) -> HttpResponse:
        """Render search results for ``?q=``; without a query, show all feeds."""
        query: str | None = request.GET.get("q", None)
        if not query:
            # No query — delegate to the plain feed listing.
            return FeedsView().get(request)
        # TODO(TheLovinator): #20 Search more fields
        # https://github.com/TheLovinator1/FeedVault/issues/20
        matches: BaseManager[Feed] = Feed.objects.filter(url__icontains=query).order_by("-added")[:100]
        context = {
            "feeds": matches,
            "description": f"Search results for {query}",
            "keywords": f"feed, rss, atom, archive, rss list, {query}",
            "author": "TheLovinator",
            "canonical": f"https://feedvault.se/search/?q={query}",
            "title": f"Search results for {query}",
            "query": query,
        }
        return render(request, "search.html", context)

View file

@ -1,78 +0,0 @@
from __future__ import annotations
from django.http import HttpRequest # noqa: TCH002
from ninja import ModelSchema, NinjaAPI
from ninja.pagination import paginate
from feedvault.models import Domain, Entry, Feed
# Versioned ninja API instance; all endpoints below hang off this router.
api_v1 = NinjaAPI(
    title="FeedVault API",
    version="0.1.0",
    description="FeedVault API",
    urls_namespace="api_v1",
)
class FeedOut(ModelSchema):
    """Output schema exposing every Feed field."""

    class Meta:
        model = Feed
        fields: str = "__all__"
class EntriesOut(ModelSchema):
    """Output schema exposing every Entry field."""

    class Meta:
        model = Entry
        fields: str = "__all__"
class DomainsOut(ModelSchema):
    """Output schema exposing every Domain field."""

    class Meta:
        model = Domain
        fields: str = "__all__"
@api_v1.get("/feeds/", response=list[FeedOut])
@paginate
def list_feeds(request: HttpRequest) -> list[Feed]:
    """Get a list of feeds."""
    # Bug fix: the return annotation was ``None`` even though a queryset is
    # returned; now consistent with the sibling list endpoints.
    return Feed.objects.all()  # type: ignore # noqa: PGH003
@api_v1.get("/feeds/{feed_id}/", response=FeedOut)
def get_feed(request: HttpRequest, feed_id: int) -> Feed:
    """Get a feed by ID."""
    # NOTE(review): .get() raises DoesNotExist for unknown ids — no 404 mapping here.
    return Feed.objects.get(id=feed_id)
@api_v1.get("/feeds/{feed_id}/entries/", response=list[EntriesOut])
@paginate
def list_entries(request: HttpRequest, feed_id: int) -> list[Entry]:
    """Get a list of entries for a feed."""
    return Entry.objects.filter(feed_id=feed_id)  # type: ignore # noqa: PGH003
@api_v1.get("/entries/", response=list[EntriesOut])
@paginate
def list_all_entries(request: HttpRequest) -> list[Entry]:
    """Get a list of entries."""
    return Entry.objects.all()  # type: ignore # noqa: PGH003
@api_v1.get("/entries/{entry_id}/", response=EntriesOut)
def get_entry(request: HttpRequest, entry_id: int) -> Entry:
    """Get an entry by ID."""
    # NOTE(review): .get() raises DoesNotExist for unknown ids — no 404 mapping here.
    return Entry.objects.get(id=entry_id)
@api_v1.get("/domains/", response=list[DomainsOut])
@paginate
def list_domains(request: HttpRequest) -> list[Domain]:
    """Get a list of domains."""
    return Domain.objects.all()  # type: ignore # noqa: PGH003
@api_v1.get("/domains/{domain_id}/", response=DomainsOut)
def get_domain(request: HttpRequest, domain_id: int) -> Domain:
    """Get a domain by ID."""
    # NOTE(review): .get() raises DoesNotExist for unknown ids — no 404 mapping here.
    return Domain.objects.get(id=domain_id)

View file

@ -2,7 +2,5 @@ from django.apps import AppConfig
class FeedVaultConfig(AppConfig):
    """FeedVault app configuration."""

    # Use 64-bit auto-incrementing primary keys for models in this app.
    default_auto_field: str = "django.db.models.BigAutoField"
    name: str = "feedvault"

View file

@ -15,7 +15,7 @@ def add_global_context(request: HttpRequest) -> dict[str, str | int]:
Returns:
A dictionary with the global context.
"""
from .models import Feed # noqa: PLC0415
from feeds.models import Feed # noqa: PLC0415
amount_of_feeds: int = Feed.objects.count()
return {"amount_of_feeds": amount_of_feeds}

View file

@ -1,376 +0,0 @@
from __future__ import annotations
import logging
from typing import TYPE_CHECKING
from urllib.parse import ParseResult, urlparse
import dateparser
import feedparser
from django.utils import timezone
from feedparser import FeedParserDict
from feedvault.models import Author, Domain, Entry, Feed, FeedAddResult, Generator, Publisher
if TYPE_CHECKING:
import datetime
from django.contrib.auth.models import AbstractBaseUser, AnonymousUser
logger: logging.Logger = logging.getLogger(__name__)
def get_domain(url: str | None) -> None | str:
"""Get the domain of a URL."""
if not url:
return None
# Parse the URL.
parsed_url: ParseResult = urlparse(url)
if not parsed_url:
logger.error("Error parsing URL: %s", url)
return None
# Get the domain.
return str(parsed_url.netloc)
def get_author(parsed_feed: dict) -> Author:
    """Get the author of a feed.

    Args:
        parsed_feed: The parsed feed (or a single entry; both carry an
            ``author_detail`` dict).

    Returns:
        The author of the feed. If the author doesn't exist, it will be created.
    """
    # A dictionary with details about the author of this entry.
    author_detail: dict = parsed_feed.get("author_detail", {})

    # get_or_create replaces the previous build/try-get/save dance: one
    # idiomatic call that looks the row up and persists it when missing.
    author, created = Author.objects.get_or_create(
        name=author_detail.get("name", ""),
        href=author_detail.get("href", ""),
        email=author_detail.get("email", ""),
    )
    if created:
        logger.info("Created author: %s", author)
    return author
def def_generator(parsed_feed: dict) -> Generator:
    """Get the generator of a feed.

    Note: the name reads like a typo for ``get_generator`` but is kept so
    existing callers keep working.

    Args:
        parsed_feed: The parsed feed.

    Returns:
        The generator of the feed. If the generator doesn't exist, it will be created.
    """
    generator_detail: dict = parsed_feed.get("generator_detail", {})

    # get_or_create replaces the previous build/try-get/save dance: one
    # idiomatic call that looks the row up and persists it when missing.
    generator, created = Generator.objects.get_or_create(
        name=generator_detail.get("name", ""),
        href=generator_detail.get("href", ""),
        version=generator_detail.get("version", ""),
    )
    if created:
        logger.info("Created generator: %s", generator)
    return generator
def get_publisher(parsed_feed: dict) -> Publisher:
    """Get the publisher of a feed.

    Args:
        parsed_feed: The parsed feed (or a single entry; both carry a
            ``publisher_detail`` dict).

    Returns:
        The publisher of the feed. If the publisher doesn't exist, it will be created.
    """
    publisher_detail: dict = parsed_feed.get("publisher_detail", {})

    # get_or_create replaces the previous build/try-get/save dance: one
    # idiomatic call that looks the row up and persists it when missing.
    publisher, created = Publisher.objects.get_or_create(
        name=publisher_detail.get("name", ""),
        href=publisher_detail.get("href", ""),
        email=publisher_detail.get("email", ""),
    )
    if created:
        logger.info("Created publisher: %s", publisher)
    return publisher
def parse_feed(url: str | None) -> dict | None:
    """Fetch and parse the feed at *url*.

    Args:
        url: The URL of the feed; None or empty yields None.

    Returns:
        The parsed feed dictionary, or None when there was nothing to parse.
    """
    # TODO(TheLovinator): Backup the feed URL. # noqa: TD003
    if not url:
        return None

    result: dict = feedparser.parse(url)
    # Normalize any falsy parse result to None for callers.
    return result or None
def add_entry(feed: Feed, entry: FeedParserDict) -> Entry | None:
    """Add an entry to the database.

    Args:
        entry: The entry to add.
        feed: The feed the entry belongs to.

    Returns:
        The entry that was created or updated.
    """
    # Author/publisher rows are looked up (or created) from the entry's
    # *_detail dicts; an entry has the same dict shape as a parsed feed here.
    author: Author = get_author(parsed_feed=entry)
    publisher: Publisher = get_publisher(parsed_feed=entry)

    # NOTE(review): feedparser's *_parsed fields are time.struct_time objects;
    # str() of a struct_time yields "time.struct_time(tm_year=...)", which
    # dateparser may not be able to parse — confirm these datetimes actually
    # populate in the database.
    pre_updated_parsed: str = str(entry.get("updated_parsed", ""))
    updated_parsed: datetime.datetime | None = (
        dateparser.parse(date_string=str(pre_updated_parsed)) if pre_updated_parsed else None
    )
    pre_published_parsed: str = str(entry.get("published_parsed", ""))
    published_parsed: datetime.datetime | None = (
        dateparser.parse(date_string=str(pre_published_parsed)) if pre_published_parsed else None
    )
    pre_expired_parsed: str = str(entry.get("expired_parsed", ""))
    expired_parsed: datetime.datetime | None = (
        dateparser.parse(date_string=str(pre_expired_parsed)) if pre_expired_parsed else None
    )
    pre_created_parsed = str(entry.get("created_parsed", ""))
    created_parsed: datetime.datetime | None = (
        dateparser.parse(date_string=str(pre_created_parsed)) if pre_created_parsed else None
    )

    # NOTE(review): entries without an id are still stored, keyed on the empty
    # string — multiple id-less entries of one feed would collapse into a
    # single row. Confirm that is intended.
    entry_id = entry.get("id", "")
    if not entry_id:
        logger.error("Entry ID not found: %s", entry)

    # Upsert keyed on (feed, entry_id); everything else is refreshed.
    added_entry, created = Entry.objects.update_or_create(
        feed=feed,
        entry_id=entry_id,
        defaults={
            "author": entry.get("author", ""),
            "author_detail": author,
            "comments": entry.get("comments", ""),
            "content": entry.get("content", {}),
            "contributors": entry.get("contributors", {}),
            "created": entry.get("created", ""),
            "created_parsed": created_parsed,
            "enclosures": entry.get("enclosures", []),
            "expired": entry.get("expired", ""),
            "expired_parsed": expired_parsed,
            "license": entry.get("license", ""),
            "link": entry.get("link", ""),
            "links": entry.get("links", []),
            "published": entry.get("published", ""),
            "published_parsed": published_parsed,
            "publisher": entry.get("publisher", ""),
            "publisher_detail": publisher,
            "source": entry.get("source", {}),
            "summary": entry.get("summary", ""),
            "summary_detail": entry.get("summary_detail", {}),
            "tags": entry.get("tags", []),
            "title": entry.get("title", ""),
            "title_detail": entry.get("title_detail", {}),
            "updated": entry.get("updated", ""),
            "updated_parsed": updated_parsed,
        },
    )
    if created:
        logger.info("Created entry: %s", added_entry)
        return added_entry
    logger.info("Updated entry: %s", added_entry)
    return added_entry
def add_domain_to_db(url: str | None) -> Domain | None:
    """Add a domain to the database.

    Args:
        url: The URL of the domain.

    Returns:
        The domain that was added, or None when no domain could be
        extracted from the URL.
    """
    domain_url: None | str = get_domain(url=url)
    if not domain_url:
        return None

    # Create the domain if it doesn't exist. get_or_create already persists
    # a newly created row, so the old extra save() was redundant and bumped
    # modified_at (auto_now) on every lookup of an existing domain.
    domain: Domain
    domain, created = Domain.objects.get_or_create(url=domain_url)
    if created:
        logger.info("Created domain: %s", domain.url)
    return domain
def populate_feed(url: str | None, user: AbstractBaseUser | AnonymousUser) -> Feed | None:
    """Populate the feed with entries.

    Args:
        url: The URL of the feed.
        user: The user adding the feed.

    Returns:
        The feed that was added, or None when the domain could not be
        determined or the feed could not be parsed.
    """
    # The owning Domain row must exist before the Feed can reference it.
    domain: Domain | None = add_domain_to_db(url=url)
    if not domain:
        return None

    # Parse the feed.
    parsed_feed: dict | None = parse_feed(url=url)
    if not parsed_feed:
        return None

    # Related rows for the feed's author/generator/publisher details.
    author: Author = get_author(parsed_feed=parsed_feed)
    generator: Generator = def_generator(parsed_feed=parsed_feed)
    publisher: Publisher = get_publisher(parsed_feed=parsed_feed)

    # NOTE(review): *_parsed values from feedparser are time.struct_time;
    # str() of a struct_time may not be parseable by dateparser — confirm
    # these datetime fields actually populate.
    pre_published_parsed: str = str(parsed_feed.get("published_parsed", ""))
    published_parsed: datetime.datetime | None = (
        dateparser.parse(date_string=str(pre_published_parsed)) if pre_published_parsed else None
    )
    pre_updated_parsed: str = str(parsed_feed.get("updated_parsed", ""))
    updated_parsed: datetime.datetime | None = (
        dateparser.parse(date_string=str(pre_updated_parsed)) if pre_updated_parsed else None
    )
    pre_modified: str = str(parsed_feed.get("modified", ""))
    modified: timezone.datetime | None = dateparser.parse(date_string=pre_modified) if pre_modified else None

    # Create or update the feed; upsert is keyed on (feed_url, domain),
    # everything else is refreshed from the freshly parsed data.
    feed, created = Feed.objects.update_or_create(
        feed_url=url,
        domain=domain,
        defaults={
            "user": user,
            "last_checked": timezone.now(),
            "bozo": parsed_feed.get("bozo", 0),
            "bozo_exception": parsed_feed.get("bozo_exception", ""),
            "encoding": parsed_feed.get("encoding", ""),
            "etag": parsed_feed.get("etag", ""),
            "headers": parsed_feed.get("headers", {}),
            "href": parsed_feed.get("href", ""),
            "modified": modified,
            "namespaces": parsed_feed.get("namespaces", {}),
            "status": parsed_feed.get("status", 0),
            "version": parsed_feed.get("version", ""),
            "author": parsed_feed.get("author", ""),
            "author_detail": author,
            "cloud": parsed_feed.get("cloud", {}),
            "contributors": parsed_feed.get("contributors", {}),
            "docs": parsed_feed.get("docs", ""),
            "errorreportsto": parsed_feed.get("errorreportsto", ""),
            "generator": parsed_feed.get("generator", ""),
            "generator_detail": generator,
            "icon": parsed_feed.get("icon", ""),
            "feed_id": parsed_feed.get("id", ""),
            "image": parsed_feed.get("image", {}),
            "info": parsed_feed.get("info", ""),
            "language": parsed_feed.get("language", ""),
            "license": parsed_feed.get("license", ""),
            "link": parsed_feed.get("link", ""),
            "links": parsed_feed.get("links", []),
            "logo": parsed_feed.get("logo", ""),
            "published": parsed_feed.get("published", ""),
            "published_parsed": published_parsed,
            "publisher": parsed_feed.get("publisher", ""),
            "publisher_detail": publisher,
            "rights": parsed_feed.get("rights", ""),
            "rights_detail": parsed_feed.get("rights_detail", {}),
            "subtitle": parsed_feed.get("subtitle", ""),
            "subtitle_detail": parsed_feed.get("subtitle_detail", {}),
            "tags": parsed_feed.get("tags", []),
            "textinput": parsed_feed.get("textinput", {}),
            "title": parsed_feed.get("title", ""),
            "title_detail": parsed_feed.get("title_detail", {}),
            "ttl": parsed_feed.get("ttl", ""),
            "updated": parsed_feed.get("updated", ""),
            "updated_parsed": updated_parsed,
        },
    )

    # Side effect: fetches the feed a second time and stores its entries.
    grab_entries(feed=feed)

    if created:
        logger.info("Created feed: %s", feed)
        return feed
    logger.info("Updated feed: %s", feed)
    return feed
def grab_entries(feed: Feed) -> None | list[Entry]:
    """Fetch a feed and store its entries.

    Args:
        feed: The feed to grab the entries from.

    Returns:
        The entries that were added, or None when the feed could not be parsed.
    """
    # Record the fetch attempt even if parsing fails below.
    feed.last_checked = timezone.now()
    feed.save()

    parsed_feed: dict | None = parse_feed(url=feed.feed_url)
    if not parsed_feed:
        return None

    # Store every entry, keeping only the ones that were actually persisted.
    entries_added: list[Entry] = [
        stored
        for stored in (add_entry(feed=feed, entry=raw) for raw in parsed_feed.get("entries", []))
        if stored
    ]
    logger.info("Added entries: %s", entries_added)
    return entries_added
def add_url(url: str, user: AbstractBaseUser | AnonymousUser) -> FeedAddResult:
    """Add a feed to the database so we can grab entries from it later.

    Args:
        url: The feed URL to register.
        user: The user adding the feed.

    Returns:
        A FeedAddResult with the feed, whether it was created, and any error.
    """
    domain: Domain | None = add_domain_to_db(url=url)
    if not domain:
        return FeedAddResult(feed=None, created=False, error="Domain not found")

    # feed_url is unique; user/domain belong in defaults so that a feed
    # already added by a different user is simply returned instead of
    # attempting a create that hits the unique constraint.
    _feed, _created = Feed.objects.get_or_create(
        feed_url=url,
        defaults={"user": user, "domain": domain},
    )
    return FeedAddResult(feed=_feed, created=_created, error=None)

View file

@ -1,66 +0,0 @@
from __future__ import annotations
from collections import defaultdict
from datetime import timedelta
from threading import Thread
from django.core.management.base import BaseCommand, no_translations
from django.db.models import Q
from django.utils import timezone
from rich.console import Console
from rich.progress import Progress
from feedvault.feeds import grab_entries
from feedvault.models import Feed
console = Console()
class DomainUpdater(Thread):
    """Worker thread that updates one domain's batch of feeds."""

    def __init__(self, feeds: list[Feed], progress: Progress, *args, **kwargs) -> None:  # noqa: ANN002, ANN003
        """Update feeds in a separate thread.

        Args:
            feeds: The feeds to update.
            progress: The Rich progress bar.
            *args: Arbitrary positional arguments.
            **kwargs: Arbitrary keyword arguments.
        """
        super().__init__(*args, **kwargs)
        self.feeds: list[Feed] = feeds
        self.progress: Progress = progress

    def run(self) -> None:
        """Process this thread's feeds, advancing the shared progress bar."""
        # NOTE(review): every thread enters the *same* Progress instance as a
        # context manager, so the first thread to finish exits (stops) the
        # shared live display for all others — confirm Progress supports this
        # multi-threaded usage.
        with self.progress as progress:
            task = progress.add_task("[cyan]Updating feeds...", total=len(self.feeds))
            for feed in self.feeds:
                grab_entries(feed)
                progress.update(task, advance=1, description=f"[green]Updated {feed.feed_url}")
class Command(BaseCommand):
    """Management command that refreshes stale feeds, one thread per domain."""

    help = "Check for new entries in feeds"
    requires_migrations_checks = True

    @no_translations
    def handle(self, *args, **options) -> None:  # noqa: ANN002, ANN003, ARG002
        """Group stale feeds by domain, update each group in its own thread."""
        # A feed is stale when it was last checked 15+ minutes ago or never.
        cutoff = timezone.now() - timedelta(minutes=15)
        stale_feeds = Feed.objects.filter(Q(last_checked__lte=cutoff) | Q(last_checked__isnull=True))

        per_domain = defaultdict(list)
        for stale_feed in stale_feeds:
            per_domain[stale_feed.domain.pk].append(stale_feed)

        progress = Progress()
        workers = [DomainUpdater(batch, progress) for batch in per_domain.values()]
        for worker in workers:
            worker.start()
        for worker in workers:
            worker.join()

        console.log("[bold green]Successfully updated feeds")

View file

@ -1,206 +0,0 @@
# Generated by Django 5.0.3 on 2024-03-15 01:27
import django.db.models.deletion
from django.conf import settings
from django.db import migrations, models
class Migration(migrations.Migration):
    """Initial auto-generated schema for the feedvault app (Django 5.0.3).

    Auto-generated migration — do not hand-edit field definitions; they must
    stay in sync with the recorded migration state.
    """

    initial = True

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='Domain',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('url', models.URLField(unique=True)),
                ('name', models.CharField(max_length=255)),
                ('categories', models.JSONField(blank=True, null=True)),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('modified_at', models.DateTimeField(auto_now=True)),
                ('hidden', models.BooleanField(default=False)),
                ('hidden_at', models.DateTimeField(blank=True, null=True)),
                ('hidden_reason', models.TextField(blank=True)),
            ],
            options={
                'verbose_name': 'Domain',
                'verbose_name_plural': 'Domains',
                'ordering': ['name'],
            },
        ),
        migrations.CreateModel(
            name='Author',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('modified_at', models.DateTimeField(auto_now=True)),
                ('name', models.TextField(blank=True)),
                ('href', models.TextField(blank=True)),
                ('email', models.TextField(blank=True)),
            ],
            options={
                'verbose_name': 'Author',
                'verbose_name_plural': 'Authors',
                'ordering': ['name'],
                'unique_together': {('name', 'email', 'href')},
            },
        ),
        migrations.CreateModel(
            name='Generator',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('modified_at', models.DateTimeField(auto_now=True)),
                ('name', models.TextField(blank=True)),
                ('href', models.TextField(blank=True)),
                ('version', models.TextField(blank=True)),
            ],
            options={
                'verbose_name': 'Feed generator',
                'verbose_name_plural': 'Feed generators',
                'ordering': ['name'],
                'unique_together': {('name', 'version', 'href')},
            },
        ),
        migrations.CreateModel(
            name='Links',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('modified_at', models.DateTimeField(auto_now=True)),
                ('rel', models.TextField(blank=True)),
                ('type', models.TextField(blank=True)),
                ('href', models.TextField(blank=True)),
                ('title', models.TextField(blank=True)),
            ],
            options={
                'verbose_name': 'Link',
                'verbose_name_plural': 'Links',
                'ordering': ['href'],
                'unique_together': {('href', 'rel')},
            },
        ),
        migrations.CreateModel(
            name='Publisher',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('modified_at', models.DateTimeField(auto_now=True)),
                ('name', models.TextField(blank=True)),
                ('href', models.TextField(blank=True)),
                ('email', models.TextField(blank=True)),
            ],
            options={
                'verbose_name': 'Publisher',
                'verbose_name_plural': 'Publishers',
                'ordering': ['name'],
                'unique_together': {('name', 'email', 'href')},
            },
        ),
        migrations.CreateModel(
            name='Feed',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('feed_url', models.URLField(unique=True)),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('modified_at', models.DateTimeField(auto_now=True)),
                ('last_checked', models.DateTimeField(blank=True, null=True)),
                ('active', models.BooleanField(default=True)),
                ('bozo', models.BooleanField()),
                ('bozo_exception', models.TextField(blank=True)),
                ('encoding', models.TextField(blank=True)),
                ('etag', models.TextField(blank=True)),
                ('headers', models.JSONField(blank=True, null=True)),
                ('href', models.TextField(blank=True)),
                ('modified', models.DateTimeField(blank=True, null=True)),
                ('namespaces', models.JSONField(blank=True, null=True)),
                ('status', models.IntegerField()),
                ('version', models.CharField(blank=True, max_length=255)),
                ('author', models.TextField(blank=True)),
                ('cloud', models.JSONField(blank=True, null=True)),
                ('contributors', models.JSONField(blank=True, null=True)),
                ('docs', models.TextField(blank=True)),
                ('errorreportsto', models.TextField(blank=True)),
                ('generator', models.TextField(blank=True)),
                ('icon', models.TextField(blank=True)),
                ('_id', models.TextField(blank=True)),
                ('image', models.JSONField(blank=True, null=True)),
                ('info', models.TextField(blank=True)),
                ('info_detail', models.JSONField(blank=True, null=True)),
                ('language', models.TextField(blank=True)),
                ('license', models.TextField(blank=True)),
                ('link', models.TextField(blank=True)),
                ('links', models.JSONField(blank=True, null=True)),
                ('logo', models.TextField(blank=True)),
                ('published', models.TextField(blank=True)),
                ('published_parsed', models.DateTimeField(blank=True, null=True)),
                ('publisher', models.TextField(blank=True)),
                ('rights', models.TextField(blank=True)),
                ('rights_detail', models.JSONField(blank=True, null=True)),
                ('subtitle', models.TextField(blank=True)),
                ('subtitle_detail', models.JSONField(blank=True, null=True)),
                ('tags', models.JSONField(blank=True, null=True)),
                ('textinput', models.JSONField(blank=True, null=True)),
                ('title', models.TextField(blank=True)),
                ('title_detail', models.JSONField(blank=True, null=True)),
                ('ttl', models.TextField(blank=True)),
                ('updated', models.TextField(blank=True)),
                ('updated_parsed', models.DateTimeField(blank=True, null=True)),
                ('author_detail', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='feeds', to='feedvault.author')),
                ('domain', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='feedvault.domain')),
                ('user', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL)),
                ('generator_detail', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='feeds', to='feedvault.generator')),
                ('publisher_detail', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='feeds', to='feedvault.publisher')),
            ],
            options={
                'verbose_name': 'Feed',
                'verbose_name_plural': 'Feeds',
                'ordering': ['-created_at'],
            },
        ),
        migrations.CreateModel(
            name='Entry',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('modified_at', models.DateTimeField(auto_now=True)),
                ('author', models.TextField(blank=True)),
                ('comments', models.TextField(blank=True)),
                ('content', models.JSONField(blank=True, null=True)),
                ('contributors', models.JSONField(blank=True, null=True)),
                ('created', models.TextField(blank=True)),
                ('created_parsed', models.DateTimeField(blank=True, null=True)),
                ('enclosures', models.JSONField(blank=True, null=True)),
                ('expired', models.TextField(blank=True)),
                ('expired_parsed', models.DateTimeField(blank=True, null=True)),
                ('_id', models.TextField(blank=True)),
                ('license', models.TextField(blank=True)),
                ('link', models.TextField(blank=True)),
                ('links', models.JSONField(blank=True, null=True)),
                ('published', models.TextField(blank=True)),
                ('published_parsed', models.DateTimeField(blank=True, null=True)),
                ('publisher', models.TextField(blank=True)),
                ('source', models.JSONField(blank=True, null=True)),
                ('summary', models.TextField(blank=True)),
                ('summary_detail', models.JSONField(blank=True, null=True)),
                ('tags', models.JSONField(blank=True, null=True)),
                ('title', models.TextField(blank=True)),
                ('title_detail', models.JSONField(blank=True, null=True)),
                ('updated', models.TextField(blank=True)),
                ('updated_parsed', models.DateTimeField(blank=True, null=True)),
                ('author_detail', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='entries', to='feedvault.author')),
                ('feed', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='feedvault.feed')),
                ('publisher_detail', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='entries', to='feedvault.publisher')),
            ],
            options={
                'verbose_name': 'Entry',
                'verbose_name_plural': 'Entries',
                'ordering': ['-created_parsed'],
            },
        ),
    ]

View file

@ -1,23 +0,0 @@
# Generated by Django 5.0.3 on 2024-03-15 16:42
from django.db import migrations
class Migration(migrations.Migration):
    """Auto-generated: rename Entry._id -> entry_id and Feed._id -> feed_id."""

    dependencies = [
        ('feedvault', '0002_alter_feed_status'),
    ]

    operations = [
        migrations.RenameField(
            model_name='entry',
            old_name='_id',
            new_name='entry_id',
        ),
        migrations.RenameField(
            model_name='feed',
            old_name='_id',
            new_name='feed_id',
        ),
    ]

View file

@ -1,18 +0,0 @@
# Generated by Django 5.0.3 on 2024-03-17 02:49
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: give Feed.bozo a default of False."""

    dependencies = [
        ('feedvault', '0003_rename__id_entry_entry_id_rename__id_feed_feed_id'),
    ]

    operations = [
        migrations.AlterField(
            model_name='feed',
            name='bozo',
            field=models.BooleanField(default=False),
        ),
    ]

View file

@ -1,32 +0,0 @@
# Generated by Django 5.0.3 on 2024-03-17 03:19
import django.db.models.deletion
from django.conf import settings
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: add the UserUploadedFile model."""

    dependencies = [
        ('feedvault', '0004_alter_feed_bozo'),
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='UserUploadedFile',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('file', models.FileField(upload_to='uploads/')),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('modified_at', models.DateTimeField(auto_now=True)),
                ('has_been_processed', models.BooleanField(default=False)),
                ('user', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'verbose_name': 'Uploaded file',
                'verbose_name_plural': 'Uploaded files',
                'ordering': ['-created_at'],
            },
        ),
    ]

View file

@ -1,25 +0,0 @@
# Generated by Django 5.0.3 on 2024-03-17 03:29
import feedvault.models
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: add original_filename and switch to get_upload_path."""

    dependencies = [
        ('feedvault', '0005_useruploadedfile'),
    ]

    operations = [
        migrations.AddField(
            model_name='useruploadedfile',
            name='original_filename',
            field=models.TextField(default='a', help_text='The original filename of the file.'),
            preserve_default=False,
        ),
        migrations.AlterField(
            model_name='useruploadedfile',
            name='file',
            field=models.FileField(upload_to=feedvault.models.get_upload_path),
        ),
    ]

View file

@ -1,52 +0,0 @@
# Generated by Django 5.0.3 on 2024-03-17 16:00
import django.db.models.deletion
import feedvault.models
from django.conf import settings
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: add description/notes and help_text on UserUploadedFile."""

    dependencies = [
        ('feedvault', '0006_useruploadedfile_original_filename_and_more'),
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.AddField(
            model_name='useruploadedfile',
            name='description',
            field=models.TextField(blank=True, help_text='Description added by user.'),
        ),
        migrations.AddField(
            model_name='useruploadedfile',
            name='notes',
            field=models.TextField(blank=True, help_text='Notes from admin.'),
        ),
        migrations.AlterField(
            model_name='useruploadedfile',
            name='created_at',
            field=models.DateTimeField(auto_now_add=True, help_text='The time the file was uploaded.'),
        ),
        migrations.AlterField(
            model_name='useruploadedfile',
            name='file',
            field=models.FileField(help_text='The file that was uploaded.', upload_to=feedvault.models.get_upload_path),
        ),
        migrations.AlterField(
            model_name='useruploadedfile',
            name='has_been_processed',
            field=models.BooleanField(default=False, help_text='Has the file content been added to the archive?'),
        ),
        migrations.AlterField(
            model_name='useruploadedfile',
            name='modified_at',
            field=models.DateTimeField(auto_now=True, help_text='The last time the file was modified.'),
        ),
        migrations.AlterField(
            model_name='useruploadedfile',
            name='user',
            field=models.ForeignKey(blank=True, help_text='The user that uploaded the file.', null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL),
        ),
    ]

View file

@ -1,339 +0,0 @@
from __future__ import annotations
import logging
import typing
import uuid
from dataclasses import dataclass
from pathlib import Path
from typing import Literal
from django.db import models
from django.db.models import JSONField
logger: logging.Logger = logging.getLogger(__name__)
@dataclass
class FeedAddResult:
    """The result of adding a feed to the database."""

    # The feed that was found or created; None on failure.
    feed: Feed | None
    # True when a new Feed row was created by this call.
    created: bool
    # Human-readable error message, or None on success.
    error: str | None
class Domain(models.Model):
    """A domain that has one or more feeds."""

    url = models.URLField(unique=True)
    name = models.CharField(max_length=255)
    categories = models.JSONField(null=True, blank=True)
    created_at = models.DateTimeField(auto_now_add=True)
    modified_at = models.DateTimeField(auto_now=True)
    # Moderation: a hidden domain is kept but flagged, with when and why.
    hidden = models.BooleanField(default=False)
    hidden_at = models.DateTimeField(null=True, blank=True)
    hidden_reason = models.TextField(blank=True)

    class Meta:
        """Meta information for the domain model."""

        ordering: typing.ClassVar[list[str]] = ["name"]
        verbose_name: str = "Domain"
        verbose_name_plural: str = "Domains"

    def __str__(self) -> str:
        """Return string representation of the domain."""
        if_hidden: Literal[" (hidden)", ""] = " (hidden)" if self.hidden else ""
        return self.name + if_hidden

    def get_absolute_url(self) -> str:
        """Return the absolute URL of the domain."""
        return f"/domain/{self.pk}/"
class Author(models.Model):
    """An author of an entry."""

    created_at = models.DateTimeField(auto_now_add=True)
    modified_at = models.DateTimeField(auto_now=True)
    # Mirrors feedparser's author_detail dict: name, href, email.
    name = models.TextField(blank=True)
    href = models.TextField(blank=True)
    email = models.TextField(blank=True)

    class Meta:
        """Meta information for the author model."""

        unique_together: typing.ClassVar[list[str]] = ["name", "email", "href"]
        ordering: typing.ClassVar[list[str]] = ["name"]
        verbose_name: str = "Author"
        verbose_name_plural: str = "Authors"

    def __str__(self) -> str:
        """Return string representation of the author."""
        return f"{self.name} - {self.email} - {self.href}"
class Generator(models.Model):
    """What program or service generated the feed."""

    created_at = models.DateTimeField(auto_now_add=True)
    modified_at = models.DateTimeField(auto_now=True)
    # Mirrors feedparser's generator_detail dict: name, href, version.
    name = models.TextField(blank=True)
    href = models.TextField(blank=True)
    version = models.TextField(blank=True)

    class Meta:
        """Meta information for the generator model."""

        unique_together: typing.ClassVar[list[str]] = ["name", "version", "href"]
        ordering: typing.ClassVar[list[str]] = ["name"]
        verbose_name: str = "Feed generator"
        verbose_name_plural: str = "Feed generators"

    def __str__(self) -> str:
        """Return string representation of the generator."""
        return self.name
class Links(models.Model):
    """A link to a feed or entry."""

    created_at = models.DateTimeField(auto_now_add=True)
    modified_at = models.DateTimeField(auto_now=True)
    # Mirrors a feedparser link dict: rel, type, href, title.
    rel = models.TextField(blank=True)
    type = models.TextField(blank=True)
    href = models.TextField(blank=True)
    title = models.TextField(blank=True)

    class Meta:
        """Meta information for the links model."""

        unique_together: typing.ClassVar[list[str]] = ["href", "rel"]
        ordering: typing.ClassVar[list[str]] = ["href"]
        verbose_name: str = "Link"
        verbose_name_plural: str = "Links"

    def __str__(self) -> str:
        """Return string representation of the links."""
        return self.href
class Publisher(models.Model):
    """The publisher of a feed or entry."""

    created_at = models.DateTimeField(auto_now_add=True)
    modified_at = models.DateTimeField(auto_now=True)
    # Mirrors feedparser's publisher_detail dict: name, href, email.
    name = models.TextField(blank=True)
    href = models.TextField(blank=True)
    email = models.TextField(blank=True)

    class Meta:
        """Meta information for the publisher model."""

        unique_together: typing.ClassVar[list[str]] = ["name", "email", "href"]
        ordering: typing.ClassVar[list[str]] = ["name"]
        verbose_name: str = "Publisher"
        verbose_name_plural: str = "Publishers"

    def __str__(self) -> str:
        """Return string representation of the publisher."""
        return self.name
class Feed(models.Model):
    """A RSS/Atom/JSON feed."""

    feed_url = models.URLField(unique=True)

    # The user that added the feed
    user = models.ForeignKey("auth.User", on_delete=models.SET_NULL, null=True, blank=True)
    domain = models.ForeignKey(Domain, on_delete=models.CASCADE)
    created_at = models.DateTimeField(auto_now_add=True)
    modified_at = models.DateTimeField(auto_now=True)
    # When the feed was last fetched; None means it has never been checked.
    last_checked = models.DateTimeField(null=True, blank=True)
    active = models.BooleanField(default=True)

    # General data (HTTP/parse metadata from feedparser; "bozo" is
    # feedparser's flag for a malformed feed)
    bozo = models.BooleanField(default=False)
    bozo_exception = models.TextField(blank=True)
    encoding = models.TextField(blank=True)
    etag = models.TextField(blank=True)
    headers = JSONField(null=True, blank=True)
    href = models.TextField(blank=True)
    modified = models.DateTimeField(null=True, blank=True)
    namespaces = JSONField(null=True, blank=True)
    status = models.IntegerField(null=True)
    version = models.CharField(max_length=255, blank=True)

    # Feed data (mirrors feedparser's feed-level fields)
    author = models.TextField(blank=True)
    author_detail = models.ForeignKey(
        Author,
        on_delete=models.PROTECT,
        null=True,
        blank=True,
        related_name="feeds",
    )
    cloud = JSONField(null=True, blank=True)
    contributors = JSONField(null=True, blank=True)
    docs = models.TextField(blank=True)
    errorreportsto = models.TextField(blank=True)
    generator = models.TextField(blank=True)
    generator_detail = models.ForeignKey(
        Generator,
        on_delete=models.PROTECT,
        null=True,
        blank=True,
        related_name="feeds",
    )
    icon = models.TextField(blank=True)
    feed_id = models.TextField(blank=True)
    image = JSONField(null=True, blank=True)
    info = models.TextField(blank=True)
    info_detail = JSONField(null=True, blank=True)
    language = models.TextField(blank=True)
    license = models.TextField(blank=True)
    link = models.TextField(blank=True)
    links = JSONField(null=True, blank=True)
    logo = models.TextField(blank=True)
    published = models.TextField(blank=True)
    published_parsed = models.DateTimeField(null=True, blank=True)
    publisher = models.TextField(blank=True)
    publisher_detail = models.ForeignKey(
        Publisher,
        on_delete=models.PROTECT,
        null=True,
        blank=True,
        related_name="feeds",
    )
    rights = models.TextField(blank=True)
    rights_detail = JSONField(null=True, blank=True)
    subtitle = models.TextField(blank=True)
    subtitle_detail = JSONField(null=True, blank=True)
    tags = JSONField(null=True, blank=True)
    textinput = JSONField(null=True, blank=True)
    title = models.TextField(blank=True)
    title_detail = JSONField(null=True, blank=True)
    ttl = models.TextField(blank=True)
    updated = models.TextField(blank=True)
    updated_parsed = models.DateTimeField(null=True, blank=True)

    class Meta:
        """Meta information for the feed model."""

        ordering: typing.ClassVar[list[str]] = ["-created_at"]
        verbose_name: str = "Feed"
        verbose_name_plural: str = "Feeds"

    def __str__(self) -> str:
        """Return string representation of the feed."""
        return f"{self.domain} - {self.title}"

    def get_absolute_url(self) -> str:
        """Return the absolute URL of the feed."""
        return f"/feed/{self.pk}/"
class Entry(models.Model):
    """Each feed has multiple entries.

    Field names appear to mirror feedparser's entry attributes (free text as
    TextField, structured data as JSONField) — TODO confirm against the code
    that populates them. Raw date strings are kept alongside their parsed
    ``*_parsed`` datetime twins.
    """

    # Deleting a Feed cascades to its entries.
    feed = models.ForeignKey(Feed, on_delete=models.CASCADE)
    created_at = models.DateTimeField(auto_now_add=True)
    modified_at = models.DateTimeField(auto_now=True)

    # Entry data
    author = models.TextField(blank=True)
    author_detail = models.ForeignKey(
        Author,
        on_delete=models.PROTECT,  # keep Author rows alive while entries reference them
        null=True,
        blank=True,
        related_name="entries",
    )
    comments = models.TextField(blank=True)
    content = JSONField(null=True, blank=True)
    contributors = JSONField(null=True, blank=True)
    created = models.TextField(blank=True)
    created_parsed = models.DateTimeField(null=True, blank=True)
    enclosures = JSONField(null=True, blank=True)
    expired = models.TextField(blank=True)
    expired_parsed = models.DateTimeField(null=True, blank=True)
    # Named entry_id (not id) to avoid clashing with Django's implicit pk.
    entry_id = models.TextField(blank=True)
    license = models.TextField(blank=True)
    link = models.TextField(blank=True)
    links = JSONField(null=True, blank=True)
    published = models.TextField(blank=True)
    published_parsed = models.DateTimeField(null=True, blank=True)
    publisher = models.TextField(blank=True)
    publisher_detail = models.ForeignKey(
        Publisher,
        on_delete=models.PROTECT,
        null=True,
        blank=True,
        related_name="entries",
    )
    source = JSONField(null=True, blank=True)
    summary = models.TextField(blank=True)
    summary_detail = JSONField(null=True, blank=True)
    tags = JSONField(null=True, blank=True)
    title = models.TextField(blank=True)
    title_detail = JSONField(null=True, blank=True)
    updated = models.TextField(blank=True)
    updated_parsed = models.DateTimeField(null=True, blank=True)

    class Meta:
        """Meta information for the entry model."""

        # Newest entries first; created_parsed may be null for unparseable
        # dates, so ordering of those rows is database-dependent.
        ordering: typing.ClassVar[list[str]] = ["-created_parsed"]
        verbose_name: str = "Entry"
        verbose_name_plural: str = "Entries"

    def __str__(self) -> str:
        """Return string representation of the entry."""
        return f"{self.feed.feed_url} - {self.title}"
def get_upload_path(instance: UserUploadedFile, filename: str) -> str:
    """Return an anonymized storage path for a user-uploaded file.

    The original filename is discarded (only its extension is kept) and
    replaced with a random UUID so uploads cannot collide or leak the
    user's naming scheme. Used as ``upload_to`` on UserUploadedFile.file.

    Args:
        instance: The UserUploadedFile being saved; only ``instance.user.id`` is read.
        filename: The client-supplied filename; only its suffix is reused.

    Returns:
        A path like ``uploads/<user_id>/<uuid4><ext>``.
    """
    ext: str = Path(filename).suffix
    # For example: 51dc07a7-a299-473c-a737-1ef16bc71609.opml
    anonymized_name: str = f"{uuid.uuid4()}{ext}"
    # Bug fix: the randomized name was previously never used in the returned
    # path, so every upload for a user mapped to the same location.
    return f"uploads/{instance.user.id}/{anonymized_name}"  # type: ignore  # noqa: PGH003
class UserUploadedFile(models.Model):
    """A file uploaded to the server by a user."""

    # Stored under an anonymized path; see get_upload_path above.
    file = models.FileField(upload_to=get_upload_path, help_text="The file that was uploaded.")
    # Kept separately because get_upload_path discards the client's filename.
    original_filename = models.TextField(help_text="The original filename of the file.")
    created_at = models.DateTimeField(auto_now_add=True, help_text="The time the file was uploaded.")
    modified_at = models.DateTimeField(auto_now=True, help_text="The last time the file was modified.")
    user = models.ForeignKey(
        "auth.User",
        on_delete=models.SET_NULL,  # keep the upload record if the account is deleted
        null=True,
        blank=True,
        help_text="The user that uploaded the file.",
    )
    has_been_processed = models.BooleanField(default=False, help_text="Has the file content been added to the archive?")
    description = models.TextField(blank=True, help_text="Description added by user.")
    notes = models.TextField(blank=True, help_text="Notes from admin.")

    class Meta:
        """Meta information for the uploaded file model."""

        ordering: typing.ClassVar[list[str]] = ["-created_at"]  # newest uploads first
        verbose_name: str = "Uploaded file"
        verbose_name_plural: str = "Uploaded files"

    def __str__(self) -> str:
        """Return string representation of the uploaded file."""
        return f"{self.original_filename} - {self.created_at}"

    def get_absolute_url(self) -> str:
        """Return the absolute URL of the uploaded file.

        Note that you will need to be logged in to access the file.
        """
        return f"/download/{self.pk}"

View file

@ -4,22 +4,29 @@ import os
import sys
from pathlib import Path
from django.utils import timezone
from dotenv import find_dotenv, load_dotenv
load_dotenv(dotenv_path=find_dotenv(), verbose=True)
# Is True when running tests, used for not spamming Discord when new users are created
TESTING: bool = len(sys.argv) > 1 and sys.argv[1] == "test"
DEBUG: bool = os.getenv(key="DEBUG", default="True").lower() == "true"
BASE_DIR: Path = Path(__file__).resolve().parent.parent
DEBUG: bool = os.getenv(key="DEBUG", default="True").lower() == "true"
SECRET_KEY: str = os.getenv("SECRET_KEY", default="")
ROOT_URLCONF = "feedvault.urls"
TIME_ZONE = "Europe/Stockholm"
LANGUAGE_CODE = "en-us"
USE_I18N = True
USE_TZ = True
ADMINS: list[tuple[str, str]] = [("Joakim Hellsén", "django@feedvault.se")]
ADMINS: list[tuple[str, str]] = [("Joakim Hellsén", "tlovinator@gmail.com")]
ALLOWED_HOSTS: list[str] = [".feedvault.se", ".localhost", "127.0.0.1"]
USE_X_FORWARDED_HOST = True
INTERNAL_IPS: list[str] = ["127.0.0.1", "localhost", "192.168.1.143"]
DEFAULT_AUTO_FIELD = "django.db.models.BigAutoField"
WSGI_APPLICATION = "feedvault.wsgi.application"
ROOT_URLCONF = "feedvault.urls"
SITE_ID = 1
if not DEBUG:
CSRF_COOKIE_DOMAIN = ".feedvault.se"
@ -38,16 +45,14 @@ EMAIL_TIMEOUT = 10
DEFAULT_FROM_EMAIL: str = os.getenv(key="EMAIL_HOST_USER", default="webmaster@localhost")
SERVER_EMAIL: str = os.getenv(key="EMAIL_HOST_USER", default="webmaster@localhost")
USE_X_FORWARDED_HOST = True
INTERNAL_IPS: list[str] = ["127.0.0.1", "localhost", "192.168.1.143"]
DEFAULT_AUTO_FIELD = "django.db.models.BigAutoField"
SITE_ID = 1
STATIC_URL = "static/"
STATIC_ROOT: Path = BASE_DIR / "staticfiles"
STATIC_ROOT.mkdir(parents=True, exist_ok=True)
STATICFILES_DIRS: list[Path] = [BASE_DIR / "static"]
for static_dir in STATICFILES_DIRS:
static_dir.mkdir(parents=True, exist_ok=True)
MEDIA_URL = "media/"
MEDIA_ROOT: Path = BASE_DIR / "media"
@ -55,7 +60,7 @@ MEDIA_ROOT.mkdir(parents=True, exist_ok=True)
INSTALLED_APPS: list[str] = [
"feedvault.apps.FeedVaultConfig",
"feeds.apps.FeedsConfig",
"debug_toolbar",
"django.contrib.auth",
"whitenoise.runserver_nostatic",
@ -80,38 +85,38 @@ MIDDLEWARE: list[str] = [
"django_htmx.middleware.HtmxMiddleware",
]
DATABASE_PATH: str = os.getenv("DATABASE_PATH", "/data")
DATABASES = {
# TODO(TheLovinator): #1 Use unix socket for postgres in production
# https://github.com/TheLovinator1/feedvault.se/issues/1
DATABASES: dict[str, dict[str, str]] = {
"default": {
"ENGINE": "django.db.backends.sqlite3",
"NAME": Path(DATABASE_PATH) / "feedvault.sqlite3",
"ATOMIC_REQUESTS": True,
"OPTIONS": {
"timeout": 30,
"transaction_mode": "IMMEDIATE",
"init_command": "PRAGMA journal_mode=WAL;",
},
"ENGINE": "django.db.backends.postgresql",
"NAME": os.getenv("DB_NAME", ""),
"USER": os.getenv("DB_USER", ""),
"PASSWORD": os.getenv("DB_PASSWORD", ""),
"HOST": os.getenv("DB_HOST", ""),
"PORT": os.getenv("DB_PORT", ""),
},
}
if not DEBUG:
SESSION_ENGINE = "django.contrib.sessions.backends.cache"
SESSION_CACHE_ALIAS: str = "default"
# Password validation
# https://docs.djangoproject.com/en/5.0/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS: list[dict[str, str]] = [
{
"NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator",
# TODO(TheLovinator): #2 Use unix socket for redis in production
# https://github.com/TheLovinator1/feedvault.se/issues/2
REDIS_LOCATION: str = f"redis://{os.getenv('REDIS_HOST', "")}:{os.getenv('REDIS_PORT', "")}/1"
CACHES: dict[str, dict[str, str | dict[str, str]]] = {
"default": {
"BACKEND": "django_redis.cache.RedisCache",
"LOCATION": REDIS_LOCATION,
"KEY_PREFIX": "feedvault-dev" if DEBUG else "feedvault",
"OPTIONS": {
"CLIENT_CLASS": "django_redis.client.DefaultClient",
"PARSER_CLASS": "redis.connection._HiredisParser",
"PASSWORD": os.getenv("REDIS_PASSWORD", ""),
},
},
{
"NAME": "django.contrib.auth.password_validation.MinimumLengthValidator",
},
{
"NAME": "django.contrib.auth.password_validation.CommonPasswordValidator",
},
{
"NAME": "django.contrib.auth.password_validation.NumericPasswordValidator",
},
]
}
# A list containing the settings for all template engines to be used with Django.
TEMPLATES = [
@ -139,11 +144,6 @@ TEMPLATES = [
},
]
# Create data/logs folder if it doesn't exist
log_folder: Path = BASE_DIR / "data" / "logs"
log_folder.mkdir(parents=True, exist_ok=True)
LOGGING = {
"version": 1,
"disable_existing_loggers": False,
@ -152,15 +152,10 @@ LOGGING = {
"level": "DEBUG",
"class": "logging.StreamHandler",
},
"file": {
"level": "DEBUG",
"class": "logging.FileHandler",
"filename": BASE_DIR / "data" / "logs" / f"{timezone.now().strftime('%Y%m%d')}.log",
},
},
"loggers": {
"django": {
"handlers": ["console", "file"],
"handlers": ["console"],
"level": "INFO",
"propagate": True,
},
@ -170,7 +165,7 @@ LOGGING = {
"propagate": True,
},
"": {
"handlers": ["console", "file"],
"handlers": ["console"],
"level": "DEBUG",
"propagate": True,
},
@ -187,3 +182,18 @@ STORAGES: dict[str, dict[str, str]] = {
else "whitenoise.storage.CompressedManifestStaticFilesStorage",
},
}
AUTH_PASSWORD_VALIDATORS: list[dict[str, str]] = [
{
"NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator",
},
{
"NAME": "django.contrib.auth.password_validation.MinimumLengthValidator",
},
{
"NAME": "django.contrib.auth.password_validation.CommonPasswordValidator",
},
{
"NAME": "django.contrib.auth.password_validation.NumericPasswordValidator",
},
]

View file

@ -1,39 +0,0 @@
from __future__ import annotations
import logging
import os
from typing import TYPE_CHECKING
from discord_webhook import DiscordWebhook
from django.conf import settings
from django.contrib.auth.models import User
from django.db.models.signals import post_save
from django.dispatch import receiver
if TYPE_CHECKING:
from requests import Response
logger: logging.Logger = logging.getLogger(__name__)
@receiver(post_save, sender=User)
def notify_when_new_user(sender: User, instance: User, *, created: bool, **kwargs) -> None: # noqa: ANN003
"""Send a Discord notification when a new user is created.
Args:
sender: The User model.
instance: The instance of the sender.
created: A boolean indicating if the instance was created.
**kwargs: Arbitrary keyword arguments.
"""
if created:
webhook_url: str | None = os.getenv("DISCORD_WEBHOOK_URL")
if not webhook_url:
logger.error("Discord webhook URL not found.")
return
msg: str = f"New user registered on FeedVault 👀: {instance.username}"
webhook = DiscordWebhook(url=webhook_url, content=msg)
if not settings.TESTING:
response: Response = webhook.execute()
logger.info("Discord notification sent: (%s) %s", response.status_code, response.text)

View file

@ -1,284 +0,0 @@
from __future__ import annotations
from pathlib import Path
from typing import TYPE_CHECKING
from django.contrib.auth.models import User
from django.http.response import HttpResponse
from django.test import Client, TestCase
from django.urls import reverse
from feedvault.models import Domain, Entry, Feed, UserUploadedFile
if TYPE_CHECKING:
from django.http import HttpResponse
class TestIndexPage(TestCase):
def test_index_page(self) -> None:
"""Test if the index page is accessible."""
response: HttpResponse = self.client.get(reverse("index"))
assert response.status_code == 200, f"Expected 200, got {response.status_code}"
response: HttpResponse = self.client.get("/")
assert response.status_code == 200, f"Expected 200, got {response.status_code}"
class TestFeedPage(TestCase):
def setUp(self) -> None:
"""Create a test feed."""
self.domain: Domain = Domain.objects.create(
name="feedvault",
url="feedvault.se",
)
self.user: User = User.objects.create_user(
username="testuser",
email="hello@feedvault.se",
password="testpassword", # noqa: S106
)
self.feed: Feed = Feed.objects.create(
user=self.user,
bozo=False,
feed_url="https://feedvault.se/feed.xml",
domain=self.domain,
)
def test_feed_page(self) -> None:
"""Test if the feed page is accessible."""
feed_id = self.feed.pk
response: HttpResponse = self.client.get(reverse("feed", kwargs={"feed_id": feed_id}))
assert response.status_code == 200, f"Expected 200, got {response.status_code}. {response.content}"
def test_feed_page_not_found(self) -> None:
"""Test if the feed page is accessible."""
feed_id = self.feed.pk + 1
response: HttpResponse = self.client.get(reverse("feed", kwargs={"feed_id": feed_id}))
assert response.status_code == 404, f"Expected 404, got {response.status_code}. {response.content}"
class TestFeedsPage(TestCase):
def test_feeds_page(self) -> None:
"""Test if the feeds page is accessible."""
response: HttpResponse = self.client.get(reverse("feeds"))
assert response.status_code == 200, f"Expected 200, got {response.status_code}"
class TestAddPage(TestCase):
def setUp(self) -> None:
"""Create a test user."""
self.user: User = User.objects.create_user(
username="testuser",
email="hello@feedvault.se",
password="testpassword", # noqa: S106
)
self.client.force_login(user=self.user)
def test_add_page(self) -> None:
"""Test if the add page is accessible."""
response: HttpResponse = self.client.post(reverse("add"), {"urls": "https://feedvault.se/feed.xml"})
assert response.status_code == 200, f"Expected 200, got {response.status_code}"
class TestUploadPage(TestCase):
def setUp(self) -> None:
"""Create a test user."""
self.user: User = User.objects.create_user(
username="testuser",
email="hello@feedvault.se",
password="testpassword", # noqa: S106
)
self.client.force_login(user=self.user)
def test_upload_page(self) -> None:
"""Test if the upload page is accessible."""
# Check the amounts of files in the database
assert UserUploadedFile.objects.count() == 0, f"Expected 0, got {UserUploadedFile.objects.count()}"
# Open this file and upload it
current_file = __file__
with Path(current_file).open("rb") as file:
response: HttpResponse = self.client.post(reverse("upload"), {"file": file})
assert response.status_code == 200, f"Expected 200, got {response.status_code}: {response.content}"
# Check if the file is in the database
assert UserUploadedFile.objects.count() == 1, f"Expected 1, got {UserUploadedFile.objects.count()}"
class TestRobotsPage(TestCase):
def test_robots_page(self) -> None:
"""Test if the robots page is accessible."""
response: HttpResponse = self.client.get(reverse("robots"))
assert response.status_code == 200, f"Expected 200, got {response.status_code}"
def test_robots_page_content(self) -> None:
"""Test if the robots page contains the expected content."""
response: HttpResponse = self.client.get(reverse("robots"))
assert (
response.content
== b"User-agent: *\nDisallow: /add\nDisallow: /upload\nDisallow: /accounts/\n\nSitemap: https://feedvault.se/sitemap.xml"
), f"Expected b'User-agent: *\nDisallow: /add\nDisallow: /upload\nDisallow: /accounts/\n\nSitemap: https://feedvault.se/sitemap.xml', got {response.content}" # noqa: E501
class TestDomains(TestCase):
def test_domains_page(self) -> None:
"""Test if the domains page is accessible."""
response: HttpResponse = self.client.get(reverse("domains"))
assert response.status_code == 200, f"Expected 200, got {response.status_code}"
class TestAPI(TestCase):
def test_api_page(self) -> None:
"""Test if the API page is accessible."""
response: HttpResponse = self.client.get(reverse("api_v1:openapi-view"))
assert response.status_code == 200, f"Expected 200, got {response.status_code}"
class TestAPIFeeds(TestCase):
def test_api_feeds_page(self) -> None:
"""Test if the API feeds page is accessible."""
response: HttpResponse = self.client.get(reverse("api_v1:list_feeds"))
assert response.status_code == 200, f"Expected 200, got {response.status_code}"
class FeedVaultAPITests(TestCase):
def setUp(self) -> None:
# Set up data for the whole TestCase
self.client = Client()
# Creating a domain instance
self.domain: Domain = Domain.objects.create(name="Example Domain")
# Creating a feed instance
self.feed: Feed = Feed.objects.create(title="Example Feed", domain=self.domain, bozo=False)
# Creating entry instances
self.entry1: Entry = Entry.objects.create(title="Example Entry 1", feed=self.feed)
self.entry2: Entry = Entry.objects.create(title="Example Entry 2", feed=self.feed)
def test_list_feeds(self) -> None:
response: HttpResponse = self.client.get("/api/v1/feeds/")
assert response.status_code == 200, f"Expected 200, got {response.status_code}"
assert "Example Feed" in response.content.decode()
def test_get_feed(self) -> None:
response: HttpResponse = self.client.get(f"/api/v1/feeds/{self.feed.pk}/")
assert response.status_code == 200, f"Expected 200, got {response.status_code}"
assert "Example Feed" in response.content.decode()
def test_list_entries(self) -> None:
response: HttpResponse = self.client.get(f"/api/v1/feeds/{self.feed.pk}/entries/")
assert response.status_code == 200, f"Expected 200, got {response.status_code}"
assert "Example Entry 1" in response.content.decode()
assert "Example Entry 2" in response.content.decode()
def test_get_entry(self) -> None:
response: HttpResponse = self.client.get(f"/api/v1/entries/{self.entry1.pk}/")
assert response.status_code == 200, f"Expected 200, got {response.status_code}"
assert "Example Entry 1" in response.content.decode()
def test_list_domains(self) -> None:
response: HttpResponse = self.client.get("/api/v1/domains/")
assert response.status_code == 200, f"Expected 200, got {response.status_code}"
assert "Example Domain" in response.content.decode()
def test_get_domain(self) -> None:
response: HttpResponse = self.client.get(f"/api/v1/domains/{self.domain.pk}/")
assert response.status_code == 200, f"Expected 200, got {response.status_code}"
assert "Example Domain" in response.content.decode()
class TestAccount(TestCase):
def test_login_page(self) -> None:
"""Test if the login page is accessible."""
response: HttpResponse = self.client.get(reverse("login"))
assert response.status_code == 200, f"Expected 200, got {response.status_code}"
def test_register_page(self) -> None:
"""Test if the register page is accessible."""
response: HttpResponse = self.client.get(reverse("register"))
assert response.status_code == 200, f"Expected 200, got {response.status_code}"
class TestLogoutPage(TestCase):
def setUp(self) -> None:
"""Create a test user."""
self.user: User = User.objects.create_user(
username="testuser",
email="hello@feedvault.se",
password="testpassword", # noqa: S106
)
self.client.force_login(user=self.user)
def test_logout_page(self) -> None:
"""Test if the logout page is accessible."""
response: HttpResponse = self.client.post(reverse("logout"))
assert response.status_code == 302, f"Expected 300, got {response.status_code}"
# Check if the user is logged out
response: HttpResponse = self.client.get(reverse("index"))
assert response.status_code == 200
assert "testuser" not in response.content.decode(
"utf-8",
), f"Expected 'testuser' not in response, got {response.content}"
class TestSitemap(TestCase):
def test_sitemap(self) -> None:
"""Test if the sitemap is accessible."""
response: HttpResponse = self.client.get(reverse("django.contrib.sitemaps.views.sitemap"))
assert response.status_code == 200, f"Expected 200, got {response.status_code}"
assert "urlset" in response.content.decode(), f"Expected 'urlset' in response, got {response.content}"
response2 = self.client.get("/sitemap.xml")
assert response2.status_code == 200, f"Expected 200, got {response2.status_code}"
assert "urlset" in response2.content.decode(), f"Expected 'urlset' in response, got {response2.content}"
class TestSearch(TestCase):
def setUp(self) -> None:
"""Create a test feed."""
self.domain: Domain = Domain.objects.create(
name="feedvault",
url="feedvault.se",
)
self.user: User = User.objects.create_user(
username="testuser",
email="hello@feedvault.se",
password="testpassword", # noqa: S106
)
self.feed: Feed = Feed.objects.create(
user=self.user,
bozo=False,
feed_url="https://feedvault.se/feed.xml",
domain=self.domain,
)
def test_search_page(self) -> None:
"""Test if the search page is accessible."""
response: HttpResponse = self.client.get(reverse("search"))
assert response.status_code == 200, f"Expected 200, got {response.status_code}"
def test_search_page_search(self) -> None:
"""Search for a term that doesn't exist."""
response: HttpResponse = self.client.get(reverse("search"), {"q": "test"})
assert response.status_code == 200, f"Expected 200, got {response.status_code}"
assert (
"No results found" in response.content.decode()
), f"Expected 'No results found' in response, got {response.content}"
def test_search_page_search_found(self) -> None:
"""Search for a term that exists."""
response: HttpResponse = self.client.get(reverse("search"), {"q": "feedvault"})
assert response.status_code == 200, f"Expected 200, got {response.status_code}"
assert "feedvault" in response.content.decode(), f"Expected 'feedvault' in response, got {response.content}"
def test_search_page_search_empty(self) -> None:
"""Search for an empty term. This should redirect to the feeds page."""
response: HttpResponse = self.client.get(reverse("search"), {"q": ""})
assert response.status_code == 200, f"Expected 302, got {response.status_code}"
assert (
"Latest Feeds" in response.content.decode()
), f"Expected 'Latest Feeds' in response, got {response.content}"

View file

@ -1,34 +1,24 @@
from __future__ import annotations
from django.contrib.sitemaps import GenericSitemap
from django.contrib.sitemaps import GenericSitemap, Sitemap
from django.contrib.sitemaps.views import sitemap
from django.urls import include, path
from django.views.decorators.cache import cache_page
from feeds.models import Feed
from feedvault import views
from feedvault.api import api_v1
from feedvault.models import Domain, Feed
from feedvault.sitemaps import StaticViewSitemap
from feedvault.views import CustomLoginView, CustomLogoutView, ProfileView, RegisterView
app_name: str = "feedvault"
sitemaps = {
sitemaps: dict[str, type[Sitemap] | Sitemap] = {
"static": StaticViewSitemap,
"feeds": GenericSitemap({"queryset": Feed.objects.all(), "date_field": "created_at"}),
"domains": GenericSitemap({"queryset": Domain.objects.all(), "date_field": "created_at"}),
}
urlpatterns: list = [
path(route="", view=views.IndexView.as_view(), name="index"),
path(route="", view=include("feeds.urls")),
path("__debug__/", include("debug_toolbar.urls")),
path(route="feed/<int:feed_id>/", view=views.FeedView.as_view(), name="feed"),
path(route="feeds/", view=views.FeedsView.as_view(), name="feeds"),
path(route="add/", view=views.AddView.as_view(), name="add"),
path(route="upload/", view=views.UploadView.as_view(), name="upload"),
path(route="download/", view=views.DownloadView.as_view(), name="download"),
path(route="delete_upload/", view=views.DeleteUploadView.as_view(), name="delete_upload"),
path(route="edit_description/", view=views.EditDescriptionView.as_view(), name="edit_description"),
path(route="robots.txt", view=cache_page(timeout=60 * 60 * 365)(views.RobotsView.as_view()), name="robots"),
path(
"sitemap.xml",
@ -36,13 +26,4 @@ urlpatterns: list = [
{"sitemaps": sitemaps},
name="django.contrib.sitemaps.views.sitemap",
),
path(route="search/", view=views.SearchView.as_view(), name="search"),
path(route="domains/", view=views.DomainsView.as_view(), name="domains"),
path(route="domain/<int:domain_id>/", view=views.DomainView.as_view(), name="domain"),
path("api/v1/", api_v1.urls), # type: ignore # noqa: PGH003
path(route="accounts/login/", view=CustomLoginView.as_view(), name="login"),
path(route="accounts/register/", view=RegisterView.as_view(), name="register"),
path(route="accounts/logout/", view=CustomLogoutView.as_view(), name="logout"),
# path(route="accounts/change-password/", view=CustomPasswordChangeView.as_view(), name="change_password"),
path(route="accounts/profile/", view=ProfileView.as_view(), name="profile"),
]

View file

@ -1,478 +1,13 @@
from __future__ import annotations
import logging
from mimetypes import guess_type
from pathlib import Path
from typing import TYPE_CHECKING, Any, ClassVar
from django.conf import settings
from django.contrib import messages
from django.contrib.auth import login
from django.contrib.auth.forms import AuthenticationForm, UserCreationForm
from django.contrib.auth.mixins import LoginRequiredMixin
from django.contrib.auth.views import LoginView, LogoutView, PasswordChangeView
from django.contrib.messages.views import SuccessMessageMixin
from django.core.exceptions import SuspiciousOperation
from django.core.paginator import EmptyPage, Page, Paginator
from django.db.models.manager import BaseManager
from django.http import FileResponse, Http404, HttpRequest, HttpResponse
from django.shortcuts import get_object_or_404, render
from django.template import loader
from django.urls import reverse_lazy
from django.http import HttpRequest, HttpResponse
from django.views import View
from django.views.generic.edit import CreateView
from feedvault.feeds import add_url
from feedvault.models import Domain, Entry, Feed, FeedAddResult, UserUploadedFile
if TYPE_CHECKING:
from django.contrib.auth.models import User
from django.core.files.uploadedfile import UploadedFile
from django.db.models.manager import BaseManager
logger: logging.Logger = logging.getLogger(__name__)
class HtmxHttpRequest(HttpRequest):
htmx: Any
class IndexView(View):
"""Index path."""
def get(self, request: HttpRequest) -> HttpResponse:
"""Load the index page."""
template = loader.get_template(template_name="index.html")
context: dict[str, str] = {
"description": "FeedVault allows users to archive and search their favorite web feeds.",
"keywords": "feed, rss, atom, archive, rss list",
"author": "TheLovinator",
"canonical": "https://feedvault.se/",
"title": "FeedVault",
}
return HttpResponse(content=template.render(context=context, request=request))
class FeedView(View):
"""A single feed."""
def get(self, request: HttpRequest, *args, **kwargs) -> HttpResponse: # noqa: ANN002, ANN003, ARG002
"""Load the feed page."""
feed_id = kwargs.get("feed_id", None)
if not feed_id:
return HttpResponse(content="No id", status=400)
feed: Feed = get_object_or_404(Feed, id=feed_id)
entries: BaseManager[Entry] = Entry.objects.filter(feed=feed).order_by("-created_parsed")[:100]
context = {
"feed": feed,
"entries": entries,
"description": f"Archive of {feed.href}",
"keywords": "feed, rss, atom, archive, rss list",
"author": f"{feed.author_detail.name if feed.author_detail else "FeedVault"}",
"canonical": f"https://feedvault.se/feed/{feed_id}/",
"title": f"{feed.title} - FeedVault",
}
return render(request, "feed.html", context)
class FeedsView(View):
"""All feeds."""
def get(self, request: HtmxHttpRequest) -> HttpResponse:
"""All feeds."""
feeds: BaseManager[Feed] = Feed.objects.only("id", "feed_url")
paginator = Paginator(object_list=feeds, per_page=100)
page_number = int(request.GET.get("page", default=1))
try:
pages: Page = paginator.get_page(page_number)
except EmptyPage:
return HttpResponse("")
context: dict[str, str | Page | int] = {
"feeds": pages,
"description": "An archive of web feeds",
"keywords": "feed, rss, atom, archive, rss list",
"author": "TheLovinator",
"canonical": "https://feedvault.se/feeds/",
"title": "Feeds",
"page": page_number,
}
template_name = "partials/feeds.html" if request.htmx else "feeds.html"
return render(request, template_name, context)
class AddView(LoginRequiredMixin, View):
"""Add a feed."""
def get(self, request: HttpRequest) -> HttpResponse:
"""Load the index page."""
template = loader.get_template(template_name="index.html")
context: dict[str, str] = {
"description": "FeedVault allows users to archive and search their favorite web feeds.",
"keywords": "feed, rss, atom, archive, rss list",
"author": "TheLovinator",
"canonical": "https://feedvault.se/",
}
return HttpResponse(content=template.render(context=context, request=request))
def post(self, request: HttpRequest) -> HttpResponse:
"""Add a feed."""
if not request.user.is_authenticated:
return HttpResponse(content="Not logged in", status=401)
if not request.user.is_active:
return HttpResponse(content="User is not active", status=403)
urls: str | None = request.POST.get("urls", None)
if not urls:
return HttpResponse(content="No urls", status=400)
# Split the urls by newline.
for url in urls.split("\n"):
feed_result: FeedAddResult = add_url(url, request.user)
feed: Feed | None = feed_result.feed
if not feed_result or not feed:
messages.error(request, f"{url} - Failed to add, {feed_result.error}")
continue
if feed_result.created:
messages.success(request, f"{feed.feed_url} added to queue")
else:
messages.warning(request, f"{feed.feed_url} already exists")
# Render the index page.
template = loader.get_template(template_name="index.html")
return HttpResponse(content=template.render(context={}, request=request))
class UploadView(LoginRequiredMixin, View):
"""Upload a file."""
def get(self, request: HttpRequest) -> HttpResponse:
"""Load the index page."""
template = loader.get_template(template_name="index.html")
context: dict[str, str] = {
"description": "FeedVault allows users to archive and search their favorite web feeds.",
"keywords": "feed, rss, atom, archive, rss list",
"author": "TheLovinator",
"canonical": "https://feedvault.se/",
}
return HttpResponse(content=template.render(context=context, request=request))
def post(self, request: HttpRequest) -> HttpResponse:
"""Upload a file."""
if not request.user.is_authenticated:
return HttpResponse(content="Not logged in", status=401)
if not request.user.is_active:
return HttpResponse(content="User is not active", status=403)
file: UploadedFile | None = request.FILES.get("file", None)
if not file:
return HttpResponse(content="No file", status=400)
# Save file to media folder
UserUploadedFile.objects.create(user=request.user, file=file, original_filename=file.name)
# Render the index page.
template = loader.get_template(template_name="index.html")
messages.success(request, f"{file.name} uploaded")
messages.info(
request,
"You can find your uploads on your profile page. Files will be parsed and added to the archive when possible. Thanks.", # noqa: E501
)
return HttpResponse(content=template.render(context={}, request=request))
class DeleteUploadView(LoginRequiredMixin, View):
"""Delete an uploaded file."""
def post(self, request: HttpRequest) -> HttpResponse:
"""Delete an uploaded file."""
file_id: str | None = request.POST.get("file_id", None)
if not file_id:
return HttpResponse("No file_id provided", status=400)
user_file: UserUploadedFile | None = UserUploadedFile.objects.filter(user=request.user, id=file_id).first()
if not user_file:
msg = "File not found"
raise Http404(msg)
user_upload_dir: Path = Path(settings.MEDIA_ROOT) / "uploads" / f"{request.user.id}" # type: ignore # noqa: PGH003
file_path: Path = user_upload_dir / Path(user_file.file.name).name
logger.debug("file_path: %s", file_path)
if not file_path.exists() or not file_path.is_file():
logger.error("User '%s' attempted to delete a file that does not exist: %s", request.user, file_path)
msg = "File not found"
raise Http404(msg)
if user_upload_dir not in file_path.parents:
logger.error(
"User '%s' attempted to delete a file that is not in their upload directory: %s",
request.user,
file_path,
)
msg = "Attempted unauthorized file access"
raise SuspiciousOperation(msg)
user_file.delete()
# Go back to the profile page
messages.success(request, f"{file_path.name} deleted")
return HttpResponse(status=204)
class EditDescriptionView(LoginRequiredMixin, View):
    """Edit the description of an uploaded file."""

    def post(self, request: HttpRequest) -> HttpResponse:
        """Edit the description of an uploaded file.

        Args:
            request: POST request carrying ``description`` and ``file_id`` form fields.

        Returns:
            200 with the new description on success, 400 if a field is missing.

        Raises:
            Http404: If the upload record or the file on disk is missing.
            SuspiciousOperation: If the resolved path escapes the user's upload directory.
        """
        new_description: str | None = request.POST.get("description", None)
        file_id: str | None = request.POST.get("file_id", None)
        if not new_description:
            return HttpResponse("No description provided", status=400)
        if not file_id:
            return HttpResponse("No file_id provided", status=400)

        # Filtering on user= ensures users can only edit their own uploads.
        user_file: UserUploadedFile | None = UserUploadedFile.objects.filter(user=request.user, id=file_id).first()
        if not user_file:
            msg = "File not found"
            raise Http404(msg)

        user_upload_dir: Path = Path(settings.MEDIA_ROOT) / "uploads" / f"{request.user.id}"  # type: ignore # noqa: PGH003
        file_path: Path = user_upload_dir / Path(user_file.file.name).name
        logger.debug("file_path: %s", file_path)

        # Log messages previously said "delete" (copy-paste from DeleteUploadView);
        # they now correctly describe the description-edit attempt.
        if not file_path.exists() or not file_path.is_file():
            logger.error(
                "User '%s' attempted to edit the description of a file that does not exist: %s",
                request.user,
                file_path,
            )
            msg = "File not found"
            raise Http404(msg)
        if user_upload_dir not in file_path.parents:
            logger.error(
                "User '%s' attempted to edit the description of a file that is not in their upload directory: %s",
                request.user,
                file_path,
            )
            msg = "Attempted unauthorized file access"
            raise SuspiciousOperation(msg)

        old_description: str = user_file.description
        user_file.description = new_description
        user_file.save()

        logger.info(
            "User '%s' updated the description of file '%s' from '%s' to '%s'",
            request.user,
            file_path,
            old_description,
            new_description,
        )
        return HttpResponse(content=new_description, status=200)
class DownloadView(LoginRequiredMixin, View):
    """Download a file."""

    def get(self, request: HttpRequest) -> HttpResponse | FileResponse:
        """/download/?file_id=1.

        Args:
            request: GET request carrying a ``file_id`` query parameter.

        Returns:
            A streaming FileResponse served as an attachment, or 400 if
            ``file_id`` is missing.

        Raises:
            Http404: If the upload record or the file on disk is missing.
            SuspiciousOperation: If the resolved path escapes the user's upload directory.
        """
        file_id: str | None = request.GET.get("file_id", None)
        if not file_id:
            return HttpResponse("No file_id provided", status=400)

        # Filtering on user= ensures users can only download their own uploads.
        user_file: UserUploadedFile | None = UserUploadedFile.objects.filter(user=request.user, id=file_id).first()
        if not user_file:
            msg = "File not found"
            raise Http404(msg)

        user_upload_dir: Path = Path(settings.MEDIA_ROOT) / "uploads" / f"{request.user.id}"  # type: ignore # noqa: PGH003
        file_path: Path = user_upload_dir / Path(user_file.file.name).name
        if not file_path.exists() or not file_path.is_file():
            msg = "File not found"
            raise Http404(msg)
        if user_upload_dir not in file_path.parents:
            msg = "Attempted unauthorized file access"
            raise SuspiciousOperation(msg)

        content_type, _ = guess_type(file_path)
        # as_attachment/filename let Django build a correctly escaped
        # Content-Disposition header (RFC 6266); the previous hand-built
        # f-string broke on filenames containing quotes or non-ASCII.
        return FileResponse(
            file_path.open("rb"),
            content_type=content_type or "application/octet-stream",
            as_attachment=True,
            filename=user_file.original_filename or file_path.name,
        )
class CustomLoginView(LoginView):
    """Custom login view."""

    template_name = "accounts/login.html"
    next_page = reverse_lazy("index")

    def form_valid(self, form: AuthenticationForm) -> HttpResponse:
        """Log the user in and redirect to the success URL.

        LoginView.form_valid() already calls login() with the authenticated
        user, so the previous explicit login() call here logged the user in
        twice (rotating the session key twice per login).
        """
        return super().form_valid(form)
class RegisterView(CreateView):
    """Account registration page backed by Django's UserCreationForm."""

    form_class = UserCreationForm
    template_name = "accounts/register.html"
    success_url: str = reverse_lazy("login")

    # Metadata rendered into the page <head>.
    extra_context: ClassVar[dict[str, str]] = {
        "title": "Register",
        "description": "Register a new account",
        "keywords": "register, account",
        "author": "TheLovinator",
        "canonical": "https://feedvault.se/accounts/register/",
    }
class CustomLogoutView(LogoutView):
    """Log the user out, then send them to the login page."""

    next_page = reverse_lazy("login")

    # Metadata rendered into the page <head>.
    extra_context: ClassVar[dict[str, str]] = {
        "title": "Logout",
        "description": "Logout of your account",
        "keywords": "logout, account",
        "author": "TheLovinator",
        "canonical": "https://feedvault.se/accounts/logout/",
    }
class CustomPasswordChangeView(SuccessMessageMixin, PasswordChangeView):
    """Password-change page; flashes a success message and returns to the index."""

    template_name = "accounts/change_password.html"
    success_url = reverse_lazy("index")
    success_message = "Your password was successfully updated!"

    # Metadata rendered into the page <head>.
    extra_context: ClassVar[dict[str, str]] = {
        "title": "Change password",
        "description": "Change your password",
        "keywords": "change, password, account",
        "author": "TheLovinator",
        "canonical": "https://feedvault.se/accounts/change-password/",
    }
class ProfileView(LoginRequiredMixin, View):
    """Profile page."""

    def get(self, request: HttpRequest) -> HttpResponse:
        """Render the profile page with the user's latest feeds and uploads."""
        username: str = request.user.get_username()

        # TODO(TheLovinator): Use htmx to load the feeds and uploads # noqa: TD003
        user_feeds: BaseManager[Feed] = Feed.objects.filter(user=request.user).order_by("-created_at")[:100]
        user_uploads: BaseManager[UserUploadedFile] = UserUploadedFile.objects.filter(user=request.user).order_by(
            "-created_at",
        )[:100]

        context: dict[str, str | Any] = {
            "description": f"Profile page for {username}",
            "keywords": f"profile, account, {username}",
            "author": f"{username}",
            "canonical": "https://feedvault.se/accounts/profile/",
            "title": f"{username}",
            "user_feeds": user_feeds,
            "user_uploads": user_uploads,
        }
        template = loader.get_template(template_name="accounts/profile.html")
        return HttpResponse(content=template.render(context=context, request=request))
class RobotsView(View):
    """Robots.txt view."""

    def get(self, request: HttpRequest) -> HttpResponse:  # noqa: ARG002
        """Serve the robots.txt file.

        The duplicated ``def get`` line (a leftover merge/diff artifact that
        made this a syntax error) has been collapsed into a single definition.
        """
        return HttpResponse(
            content="User-agent: *\nDisallow: /add\nDisallow: /upload\nDisallow: /accounts/\n\nSitemap: https://feedvault.se/sitemap.xml",
            content_type="text/plain",
        )
class DomainsView(View):
    """All domains."""

    def get(self: DomainsView, request: HtmxHttpRequest) -> HttpResponse:
        """Render the paginated domains page.

        Args:
            request: GET request; optional ``page`` query parameter.

        Returns:
            The full page, or only the partial when the request comes from htmx.
        """
        domains: BaseManager[Domain] = Domain.objects.only("id", "url", "created_at")
        paginator = Paginator(object_list=domains, per_page=100)

        # A malformed ?page= value (e.g. "abc") previously raised ValueError
        # and returned HTTP 500; fall back to the first page instead.
        try:
            page_number = int(request.GET.get("page", default=1))
        except (TypeError, ValueError):
            page_number = 1

        try:
            pages: Page = paginator.get_page(page_number)
        except EmptyPage:
            return HttpResponse("")

        context: dict[str, str | Page | int] = {
            "domains": pages,
            "description": "Domains",
            "keywords": "feed, rss, atom, archive, rss list",
            "author": "TheLovinator",
            "canonical": "https://feedvault.se/domains/",
            "title": "Domains",
            "page": page_number,
        }
        # htmx requests get just the list partial; full page otherwise.
        template_name = "partials/domains.html" if request.htmx else "domains.html"
        return render(request, template_name, context)
class DomainView(View):
    """A single domain."""

    def get(self: DomainView, request: HttpRequest, domain_id: int) -> HttpResponse:
        """Render the page for one domain with its 100 newest feeds."""
        domain: Domain = get_object_or_404(Domain, id=domain_id)
        recent_feeds: BaseManager[Feed] = Feed.objects.filter(domain=domain).order_by("-created_at")[:100]

        context = {
            "domain": domain,
            "feeds": recent_feeds,
            "description": f"Archive of {domain.name}",
            "keywords": "feed, rss, atom, archive, rss list",
            "author": "TheLovinator",
            "canonical": f"https://feedvault.se/domain/{domain_id}/",
            "title": f"{domain.name} - FeedVault",
        }
        return render(request, "domain.html", context)
class SearchView(View):
    """Search view."""

    def get(self, request: HtmxHttpRequest) -> HttpResponse:
        """Render search results for the ``q`` query parameter."""
        query: str | None = request.GET.get("q", None)
        if not query:
            # No query: fall back to the plain feed listing.
            return FeedsView().get(request)

        # TODO(TheLovinator): #20 Search more fields
        # https://github.com/TheLovinator1/FeedVault/issues/20
        matching_feeds: BaseManager[Feed] = Feed.objects.filter(feed_url__icontains=query).order_by("-created_at")[:100]

        context = {
            "feeds": matching_feeds,
            "description": f"Search results for {query}",
            "keywords": f"feed, rss, atom, archive, rss list, {query}",
            "author": "TheLovinator",
            "canonical": f"https://feedvault.se/search/?q={query}",
            "title": f"Search results for {query}",
            "query": query,
        }
        return render(request, "search.html", context)

824
poetry.lock generated
View file

@ -1,4 +1,4 @@
# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand.
# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand.
[[package]]
name = "annotated-types"
@ -25,6 +25,27 @@ files = [
[package.extras]
tests = ["mypy (>=0.800)", "pytest", "pytest-asyncio"]
[[package]]
name = "beautifulsoup4"
version = "4.12.3"
description = "Screen-scraping library"
optional = false
python-versions = ">=3.6.0"
files = [
{file = "beautifulsoup4-4.12.3-py3-none-any.whl", hash = "sha256:b80878c9f40111313e55da8ba20bdba06d8fa3969fc68304167741bbf9e082ed"},
{file = "beautifulsoup4-4.12.3.tar.gz", hash = "sha256:74e3d1928edc070d21748185c46e3fb33490f22f52a3addee9aee0f4f7781051"},
]
[package.dependencies]
soupsieve = ">1.2"
[package.extras]
cchardet = ["cchardet"]
chardet = ["chardet"]
charset-normalizer = ["charset-normalizer"]
html5lib = ["html5lib"]
lxml = ["lxml"]
[[package]]
name = "brotli"
version = "1.1.0"
@ -128,6 +149,17 @@ files = [
{file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"},
]
[[package]]
name = "cfgv"
version = "3.4.0"
description = "Validate configuration and produce human readable error messages."
optional = false
python-versions = ">=3.8"
files = [
{file = "cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9"},
{file = "cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560"},
]
[[package]]
name = "charset-normalizer"
version = "3.3.2"
@ -290,33 +322,29 @@ fasttext = ["fasttext"]
langdetect = ["langdetect"]
[[package]]
name = "discord-webhook"
version = "1.3.1"
description = "Easily send Discord webhooks with Python"
name = "distlib"
version = "0.3.8"
description = "Distribution utilities"
optional = false
python-versions = ">=3.10,<4.0"
python-versions = "*"
files = [
{file = "discord_webhook-1.3.1-py3-none-any.whl", hash = "sha256:ede07028316de76d24eb811836e2b818b2017510da786777adcb0d5970e7af79"},
{file = "discord_webhook-1.3.1.tar.gz", hash = "sha256:ee3e0f3ea4f3dc8dc42be91f75b894a01624c6c13fea28e23ebcf9a6c9a304f7"},
{file = "distlib-0.3.8-py2.py3-none-any.whl", hash = "sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784"},
{file = "distlib-0.3.8.tar.gz", hash = "sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64"},
]
[package.dependencies]
requests = ">=2.28.1,<3.0.0"
[package.extras]
async = ["httpx (>=0.23.0,<0.24.0)"]
[[package]]
name = "Django"
version = "5.1.dev20240327194041"
name = "django"
version = "5.0.6"
description = "A high-level Python web framework that encourages rapid development and clean, pragmatic design."
optional = false
python-versions = ">=3.10"
files = []
develop = false
files = [
{file = "Django-5.0.6-py3-none-any.whl", hash = "sha256:8363ac062bb4ef7c3f12d078f6fa5d154031d129a15170a1066412af49d30905"},
{file = "Django-5.0.6.tar.gz", hash = "sha256:ff1b61005004e476e0aeea47c7f79b85864c70124030e95146315396f1e7951f"},
]
[package.dependencies]
asgiref = ">=3.7.0"
asgiref = ">=3.7.0,<4"
sqlparse = ">=0.3.1"
tzdata = {version = "*", markers = "sys_platform == \"win32\""}
@ -324,12 +352,6 @@ tzdata = {version = "*", markers = "sys_platform == \"win32\""}
argon2 = ["argon2-cffi (>=19.1.0)"]
bcrypt = ["bcrypt"]
[package.source]
type = "git"
url = "https://github.com/django/django.git"
reference = "HEAD"
resolved_reference = "944745afe2ec45aed30cef799c250107f1364ca7"
[[package]]
name = "django-debug-toolbar"
version = "4.3.0"
@ -380,6 +402,27 @@ dev = ["pre-commit"]
doc = ["markdown-include", "mkdocs", "mkdocs-material", "mkdocstrings"]
test = ["django-stubs", "mypy (==1.7.1)", "psycopg2-binary", "pytest", "pytest-asyncio", "pytest-cov", "pytest-django", "ruff (==0.1.7)"]
[[package]]
name = "django-redis"
version = "5.4.0"
description = "Full featured redis cache backend for Django."
optional = false
python-versions = ">=3.6"
files = [
{file = "django-redis-5.4.0.tar.gz", hash = "sha256:6a02abaa34b0fea8bf9b707d2c363ab6adc7409950b2db93602e6cb292818c42"},
{file = "django_redis-5.4.0-py3-none-any.whl", hash = "sha256:ebc88df7da810732e2af9987f7f426c96204bf89319df4c6da6ca9a2942edd5b"},
]
[package.dependencies]
Django = ">=3.2"
redis = [
{version = ">=3,<4.0.0 || >4.0.0,<4.0.1 || >4.0.1"},
{version = ">=3,<4.0.0 || >4.0.0,<4.0.1 || >4.0.1", extras = ["hiredis"], optional = true, markers = "extra == \"hiredis\""},
]
[package.extras]
hiredis = ["redis[hiredis] (>=3,!=4.0.0,!=4.0.1)"]
[[package]]
name = "djlint"
version = "1.34.1"
@ -428,6 +471,22 @@ files = [
[package.dependencies]
sgmllib3k = "*"
[[package]]
name = "filelock"
version = "3.14.0"
description = "A platform independent file lock."
optional = false
python-versions = ">=3.8"
files = [
{file = "filelock-3.14.0-py3-none-any.whl", hash = "sha256:43339835842f110ca7ae60f1e1c160714c5a6afd15a2873419ab185334975c0f"},
{file = "filelock-3.14.0.tar.gz", hash = "sha256:6ea72da3be9b8c82afd3edcf99f2fffbb5076335a5ae4d03248bb5b6c3eae78a"},
]
[package.extras]
docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"]
testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8.0.1)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)"]
typing = ["typing-extensions (>=4.8)"]
[[package]]
name = "gunicorn"
version = "21.2.0"
@ -448,6 +507,124 @@ gevent = ["gevent (>=1.4.0)"]
setproctitle = ["setproctitle"]
tornado = ["tornado (>=0.2)"]
[[package]]
name = "hiredis"
version = "2.3.2"
description = "Python wrapper for hiredis"
optional = false
python-versions = ">=3.7"
files = [
{file = "hiredis-2.3.2-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:742093f33d374098aa21c1696ac6e4874b52658c870513a297a89265a4d08fe5"},
{file = "hiredis-2.3.2-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:9e14fb70ca4f7efa924f508975199353bf653f452e4ef0a1e47549e208f943d7"},
{file = "hiredis-2.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6d7302b4b17fcc1cc727ce84ded7f6be4655701e8d58744f73b09cb9ed2b13df"},
{file = "hiredis-2.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ed63e8b75c193c5e5a8288d9d7b011da076cc314fafc3bfd59ec1d8a750d48c8"},
{file = "hiredis-2.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6b4edee59dc089bc3948f4f6fba309f51aa2ccce63902364900aa0a553a85e97"},
{file = "hiredis-2.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6481c3b7673a86276220140456c2a6fbfe8d1fb5c613b4728293c8634134824"},
{file = "hiredis-2.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:684840b014ce83541a087fcf2d48227196576f56ae3e944d4dfe14c0a3e0ccb7"},
{file = "hiredis-2.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1c4c0bcf786f0eac9593367b6279e9b89534e008edbf116dcd0de956524702c8"},
{file = "hiredis-2.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:66ab949424ac6504d823cba45c4c4854af5c59306a1531edb43b4dd22e17c102"},
{file = "hiredis-2.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:322c668ee1c12d6c5750a4b1057e6b4feee2a75b3d25d630922a463cfe5e7478"},
{file = "hiredis-2.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:bfa73e3f163c6e8b2ec26f22285d717a5f77ab2120c97a2605d8f48b26950dac"},
{file = "hiredis-2.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:7f39f28ffc65de577c3bc0c7615f149e35bc927802a0f56e612db9b530f316f9"},
{file = "hiredis-2.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:55ce31bf4711da879b96d511208efb65a6165da4ba91cb3a96d86d5a8d9d23e6"},
{file = "hiredis-2.3.2-cp310-cp310-win32.whl", hash = "sha256:3dd63d0bbbe75797b743f35d37a4cca7ca7ba35423a0de742ae2985752f20c6d"},
{file = "hiredis-2.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:ea002656a8d974daaf6089863ab0a306962c8b715db6b10879f98b781a2a5bf5"},
{file = "hiredis-2.3.2-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:adfbf2e9c38b77d0db2fb32c3bdaea638fa76b4e75847283cd707521ad2475ef"},
{file = "hiredis-2.3.2-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:80b02d27864ebaf9b153d4b99015342382eeaed651f5591ce6f07e840307c56d"},
{file = "hiredis-2.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bd40d2e2f82a483de0d0a6dfd8c3895a02e55e5c9949610ecbded18188fd0a56"},
{file = "hiredis-2.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dfa904045d7cebfb0f01dad51352551cce1d873d7c3f80c7ded7d42f8cac8f89"},
{file = "hiredis-2.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:28bd184b33e0dd6d65816c16521a4ba1ffbe9ff07d66873c42ea4049a62fed83"},
{file = "hiredis-2.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f70481213373d44614148f0f2e38e7905be3f021902ae5167289413196de4ba4"},
{file = "hiredis-2.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb8797b528c1ff81eef06713623562b36db3dafa106b59f83a6468df788ff0d1"},
{file = "hiredis-2.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:02fc71c8333586871602db4774d3a3e403b4ccf6446dc4603ec12df563127cee"},
{file = "hiredis-2.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0da56915bda1e0a49157191b54d3e27689b70960f0685fdd5c415dacdee2fbed"},
{file = "hiredis-2.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:e2674a5a3168349435b08fa0b82998ed2536eb9acccf7087efe26e4cd088a525"},
{file = "hiredis-2.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:dc1c3fd49930494a67dcec37d0558d99d84eca8eb3f03b17198424538f2608d7"},
{file = "hiredis-2.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:14c7b43205e515f538a9defb4e411e0f0576caaeeda76bb9993ed505486f7562"},
{file = "hiredis-2.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7bac7e02915b970c3723a7a7c5df4ba7a11a3426d2a3f181e041aa506a1ff028"},
{file = "hiredis-2.3.2-cp311-cp311-win32.whl", hash = "sha256:63a090761ddc3c1f7db5e67aa4e247b4b3bb9890080bdcdadd1b5200b8b89ac4"},
{file = "hiredis-2.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:70d226ab0306a5b8d408235cabe51d4bf3554c9e8a72d53ce0b3c5c84cf78881"},
{file = "hiredis-2.3.2-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:5c614552c6bd1d0d907f448f75550f6b24fb56cbfce80c094908b7990cad9702"},
{file = "hiredis-2.3.2-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:9c431431abf55b64347ddc8df68b3ef840269cb0aa5bc2d26ad9506eb4b1b866"},
{file = "hiredis-2.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a45857e87e9d2b005e81ddac9d815a33efd26ec67032c366629f023fe64fb415"},
{file = "hiredis-2.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e138d141ec5a6ec800b6d01ddc3e5561ce1c940215e0eb9960876bfde7186aae"},
{file = "hiredis-2.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:387f655444d912a963ab68abf64bf6e178a13c8e4aa945cb27388fd01a02e6f1"},
{file = "hiredis-2.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4852f4bf88f0e2d9bdf91279892f5740ed22ae368335a37a52b92a5c88691140"},
{file = "hiredis-2.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d711c107e83117129b7f8bd08e9820c43ceec6204fff072a001fd82f6d13db9f"},
{file = "hiredis-2.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:92830c16885f29163e1c2da1f3c1edb226df1210ec7e8711aaabba3dd0d5470a"},
{file = "hiredis-2.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:16b01d9ceae265d4ab9547be0cd628ecaff14b3360357a9d30c029e5ae8b7e7f"},
{file = "hiredis-2.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:5986fb5f380169270a0293bebebd95466a1c85010b4f1afc2727e4d17c452512"},
{file = "hiredis-2.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:49532d7939cc51f8e99efc326090c54acf5437ed88b9c904cc8015b3c4eda9c9"},
{file = "hiredis-2.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:8f34801b251ca43ad70691fb08b606a2e55f06b9c9fb1fc18fd9402b19d70f7b"},
{file = "hiredis-2.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:7298562a49d95570ab1c7fc4051e72824c6a80e907993a21a41ba204223e7334"},
{file = "hiredis-2.3.2-cp312-cp312-win32.whl", hash = "sha256:e1d86b75de787481b04d112067a4033e1ecfda2a060e50318a74e4e1c9b2948c"},
{file = "hiredis-2.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:6dbfe1887ffa5cf3030451a56a8f965a9da2fa82b7149357752b67a335a05fc6"},
{file = "hiredis-2.3.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:4fc242e9da4af48714199216eb535b61e8f8d66552c8819e33fc7806bd465a09"},
{file = "hiredis-2.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e81aa4e9a1fcf604c8c4b51aa5d258e195a6ba81efe1da82dea3204443eba01c"},
{file = "hiredis-2.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:419780f8583ddb544ffa86f9d44a7fcc183cd826101af4e5ffe535b6765f5f6b"},
{file = "hiredis-2.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6871306d8b98a15e53a5f289ec1106a3a1d43e7ab6f4d785f95fcef9a7bd9504"},
{file = "hiredis-2.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88cb0b35b63717ef1e41d62f4f8717166f7c6245064957907cfe177cc144357c"},
{file = "hiredis-2.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8c490191fa1218851f8a80c5a21a05a6f680ac5aebc2e688b71cbfe592f8fec6"},
{file = "hiredis-2.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:4baf4b579b108062e91bd2a991dc98b9dc3dc06e6288db2d98895eea8acbac22"},
{file = "hiredis-2.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:e627d8ef5e100556e09fb44c9571a432b10e11596d3c4043500080ca9944a91a"},
{file = "hiredis-2.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:ba3dc0af0def8c21ce7d903c59ea1e8ec4cb073f25ece9edaec7f92a286cd219"},
{file = "hiredis-2.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:56e9b7d6051688ca94e68c0c8a54a243f8db841911b683cedf89a29d4de91509"},
{file = "hiredis-2.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:380e029bb4b1d34cf560fcc8950bf6b57c2ef0c9c8b7c7ac20b7c524a730fadd"},
{file = "hiredis-2.3.2-cp37-cp37m-win32.whl", hash = "sha256:948d9f2ca7841794dd9b204644963a4bcd69ced4e959b0d4ecf1b8ce994a6daa"},
{file = "hiredis-2.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:cfa67afe2269b2d203cd1389c00c5bc35a287cd57860441fb0e53b371ea6a029"},
{file = "hiredis-2.3.2-cp38-cp38-macosx_10_15_universal2.whl", hash = "sha256:bcbe47da0aebc00a7cfe3ebdcff0373b86ce2b1856251c003e3d69c9db44b5a7"},
{file = "hiredis-2.3.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:f2c9c0d910dd3f7df92f0638e7f65d8edd7f442203caf89c62fc79f11b0b73f8"},
{file = "hiredis-2.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:01b6c24c0840ac7afafbc4db236fd55f56a9a0919a215c25a238f051781f4772"},
{file = "hiredis-2.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1f567489f422d40c21e53212a73bef4638d9f21043848150f8544ef1f3a6ad1"},
{file = "hiredis-2.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:28adecb308293e705e44087a1c2d557a816f032430d8a2a9bb7873902a1c6d48"},
{file = "hiredis-2.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:27e9619847e9dc70b14b1ad2d0fb4889e7ca18996585c3463cff6c951fd6b10b"},
{file = "hiredis-2.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a0026cfbf29f07649b0e34509091a2a6016ff8844b127de150efce1c3aff60b"},
{file = "hiredis-2.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f9de7586522e5da6bee83c9cf0dcccac0857a43249cb4d721a2e312d98a684d1"},
{file = "hiredis-2.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:e58494f282215fc461b06709e9a195a24c12ba09570f25bdf9efb036acc05101"},
{file = "hiredis-2.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:de3a32b4b76d46f1eb42b24a918d51d8ca52411a381748196241d59a895f7c5c"},
{file = "hiredis-2.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:1979334ccab21a49c544cd1b8d784ffb2747f99a51cb0bd0976eebb517628382"},
{file = "hiredis-2.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:0c0773266e1c38a06e7593bd08870ac1503f5f0ce0f5c63f2b4134b090b5d6a4"},
{file = "hiredis-2.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bd1cee053416183adcc8e6134704c46c60c3f66b8faaf9e65bf76191ca59a2f7"},
{file = "hiredis-2.3.2-cp38-cp38-win32.whl", hash = "sha256:5341ce3d01ef3c7418a72e370bf028c7aeb16895e79e115fe4c954fff990489e"},
{file = "hiredis-2.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:8fc7197ff33047ce43a67851ccf190acb5b05c52fd4a001bb55766358f04da68"},
{file = "hiredis-2.3.2-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:f47775e27388b58ce52f4f972f80e45b13c65113e9e6b6bf60148f893871dc9b"},
{file = "hiredis-2.3.2-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:9412a06b8a8e09abd6313d96864b6d7713c6003a365995a5c70cfb9209df1570"},
{file = "hiredis-2.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f3020b60e3fc96d08c2a9b011f1c2e2a6bdcc09cb55df93c509b88be5cb791df"},
{file = "hiredis-2.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:53d0f2c59bce399b8010a21bc779b4f8c32d0f582b2284ac8c98dc7578b27bc4"},
{file = "hiredis-2.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:57c0d0c7e308ed5280a4900d4468bbfec51f0e1b4cde1deae7d4e639bc6b7766"},
{file = "hiredis-2.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1d63318ca189fddc7e75f6a4af8eae9c0545863619fb38cfba5f43e81280b286"},
{file = "hiredis-2.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e741ffe4e2db78a1b9dd6e5d29678ce37fbaaf65dfe132e5b82a794413302ef1"},
{file = "hiredis-2.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb98038ccd368e0d88bd92ee575c58cfaf33e77f788c36b2a89a84ee1936dc6b"},
{file = "hiredis-2.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:eae62ed60d53b3561148bcd8c2383e430af38c0deab9f2dd15f8874888ffd26f"},
{file = "hiredis-2.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:ca33c175c1cf60222d9c6d01c38fc17ec3a484f32294af781de30226b003e00f"},
{file = "hiredis-2.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:0c5f6972d2bdee3cd301d5c5438e31195cf1cabf6fd9274491674d4ceb46914d"},
{file = "hiredis-2.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:a6b54dabfaa5dbaa92f796f0c32819b4636e66aa8e9106c3d421624bd2a2d676"},
{file = "hiredis-2.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e96cd35df012a17c87ae276196ea8f215e77d6eeca90709eb03999e2d5e3fd8a"},
{file = "hiredis-2.3.2-cp39-cp39-win32.whl", hash = "sha256:63b99b5ea9fe4f21469fb06a16ca5244307678636f11917359e3223aaeca0b67"},
{file = "hiredis-2.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:a50c8af811b35b8a43b1590cf890b61ff2233225257a3cad32f43b3ec7ff1b9f"},
{file = "hiredis-2.3.2-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7e8bf4444b09419b77ce671088db9f875b26720b5872d97778e2545cd87dba4a"},
{file = "hiredis-2.3.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5bd42d0d45ea47a2f96babd82a659fbc60612ab9423a68e4a8191e538b85542a"},
{file = "hiredis-2.3.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80441b55edbef868e2563842f5030982b04349408396e5ac2b32025fb06b5212"},
{file = "hiredis-2.3.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ec444ab8f27562a363672d6a7372bc0700a1bdc9764563c57c5f9efa0e592b5f"},
{file = "hiredis-2.3.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:f9f606e810858207d4b4287b4ef0dc622c2aa469548bf02b59dcc616f134f811"},
{file = "hiredis-2.3.2-pp37-pypy37_pp73-macosx_10_15_x86_64.whl", hash = "sha256:c3dde4ca00fe9eee3b76209711f1941bb86db42b8a75d7f2249ff9dfc026ab0e"},
{file = "hiredis-2.3.2-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4dd676107a1d3c724a56a9d9db38166ad4cf44f924ee701414751bd18a784a0"},
{file = "hiredis-2.3.2-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce42649e2676ad783186264d5ffc788a7612ecd7f9effb62d51c30d413a3eefe"},
{file = "hiredis-2.3.2-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8e3f8b1733078ac663dad57e20060e16389a60ab542f18a97931f3a2a2dd64a4"},
{file = "hiredis-2.3.2-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:532a84a82156a82529ec401d1c25d677c6543c791e54a263aa139541c363995f"},
{file = "hiredis-2.3.2-pp38-pypy38_pp73-macosx_10_15_x86_64.whl", hash = "sha256:4d59f88c4daa36b8c38e59ac7bffed6f5d7f68eaccad471484bf587b28ccc478"},
{file = "hiredis-2.3.2-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a91a14dd95e24dc078204b18b0199226ee44644974c645dc54ee7b00c3157330"},
{file = "hiredis-2.3.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bb777a38797c8c7df0444533119570be18d1a4ce5478dffc00c875684df7bfcb"},
{file = "hiredis-2.3.2-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d47c915897a99d0d34a39fad4be97b4b709ab3d0d3b779ebccf2b6024a8c681e"},
{file = "hiredis-2.3.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:333b5e04866758b11bda5f5315b4e671d15755fc6ed3b7969721bc6311d0ee36"},
{file = "hiredis-2.3.2-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:c8937f1100435698c18e4da086968c4b5d70e86ea718376f833475ab3277c9aa"},
{file = "hiredis-2.3.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fa45f7d771094b8145af10db74704ab0f698adb682fbf3721d8090f90e42cc49"},
{file = "hiredis-2.3.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:33d5ebc93c39aed4b5bc769f8ce0819bc50e74bb95d57a35f838f1c4378978e0"},
{file = "hiredis-2.3.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a797d8c7df9944314d309b0d9e1b354e2fa4430a05bb7604da13b6ad291bf959"},
{file = "hiredis-2.3.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:e15a408f71a6c8c87b364f1f15a6cd9c1baca12bbc47a326ac8ab99ec7ad3c64"},
{file = "hiredis-2.3.2.tar.gz", hash = "sha256:733e2456b68f3f126ddaf2cd500a33b25146c3676b97ea843665717bda0c5d43"},
]
[[package]]
name = "html-tag-names"
version = "0.1.2"
@ -470,15 +647,40 @@ files = [
{file = "html_void_elements-0.1.0-py3-none-any.whl", hash = "sha256:784cf39db03cdeb017320d9301009f8f3480f9d7b254d0974272e80e0cb5e0d2"},
]
[[package]]
name = "identify"
version = "2.5.36"
description = "File identification library for Python"
optional = false
python-versions = ">=3.8"
files = [
{file = "identify-2.5.36-py2.py3-none-any.whl", hash = "sha256:37d93f380f4de590500d9dba7db359d0d3da95ffe7f9de1753faa159e71e7dfa"},
{file = "identify-2.5.36.tar.gz", hash = "sha256:e5e00f54165f9047fbebeb4a560f9acfb8af4c88232be60a488e9b68d122745d"},
]
[package.extras]
license = ["ukkonen"]
[[package]]
name = "idna"
version = "3.6"
version = "3.7"
description = "Internationalized Domain Names in Applications (IDNA)"
optional = false
python-versions = ">=3.5"
files = [
{file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"},
{file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"},
{file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"},
{file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"},
]
[[package]]
name = "iso8601"
version = "2.1.0"
description = "Simple module to parse ISO 8601 dates"
optional = false
python-versions = ">=3.7,<4.0"
files = [
{file = "iso8601-2.1.0-py3-none-any.whl", hash = "sha256:aac4145c4dcb66ad8b648a02830f5e2ff6c24af20f4f482689be402db2429242"},
{file = "iso8601-2.1.0.tar.gz", hash = "sha256:6b1d3829ee8921c4301998c909f7829fa9ed3cbdac0d3b16af2d743aed1ba8df"},
]
[[package]]
@ -497,13 +699,13 @@ six = ">=1.13.0"
[[package]]
name = "json5"
version = "0.9.24"
version = "0.9.25"
description = "A Python implementation of the JSON5 data format."
optional = false
python-versions = ">=3.8"
files = [
{file = "json5-0.9.24-py3-none-any.whl", hash = "sha256:4ca101fd5c7cb47960c055ef8f4d0e31e15a7c6c48c3b6f1473fc83b6c462a13"},
{file = "json5-0.9.24.tar.gz", hash = "sha256:0c638399421da959a20952782800e5c1a78c14e08e1dc9738fa10d8ec14d58c8"},
{file = "json5-0.9.25-py3-none-any.whl", hash = "sha256:34ed7d834b1341a86987ed52f3f76cd8ee184394906b6e22a1e0deb9ab294e8f"},
{file = "json5-0.9.25.tar.gz", hash = "sha256:548e41b9be043f9426776f05df8635a00fe06104ea51ed24b67f908856e151ae"},
]
[[package]]
@ -530,6 +732,75 @@ profiling = ["gprof2dot"]
rtd = ["jupyter_sphinx", "mdit-py-plugins", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"]
testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"]
[[package]]
name = "markupsafe"
version = "2.1.5"
description = "Safely add untrusted strings to HTML/XML markup."
optional = false
python-versions = ">=3.7"
files = [
{file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"},
{file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"},
{file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"},
{file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"},
{file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"},
{file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"},
{file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"},
{file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"},
{file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"},
{file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"},
{file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"},
{file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"},
{file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"},
{file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"},
{file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"},
{file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"},
{file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"},
{file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"},
{file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"},
{file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"},
{file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"},
{file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"},
{file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"},
{file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"},
{file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"},
{file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"},
{file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"},
{file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"},
{file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"},
{file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"},
{file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"},
{file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"},
{file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"},
{file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"},
{file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"},
{file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"},
{file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"},
{file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"},
{file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"},
{file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"},
{file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"},
{file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"},
{file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"},
{file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"},
{file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"},
{file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"},
{file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"},
{file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"},
{file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"},
{file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"},
{file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"},
{file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"},
{file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"},
{file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"},
{file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"},
{file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"},
{file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"},
{file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"},
{file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"},
{file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"},
]
[[package]]
name = "mdurl"
version = "0.1.2"
@ -541,6 +812,20 @@ files = [
{file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"},
]
[[package]]
name = "nodeenv"
version = "1.8.0"
description = "Node.js virtual environment builder"
optional = false
python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*"
files = [
{file = "nodeenv-1.8.0-py2.py3-none-any.whl", hash = "sha256:df865724bb3c3adc86b3876fa209771517b0cfe596beff01a92700e0e8be4cec"},
{file = "nodeenv-1.8.0.tar.gz", hash = "sha256:d51e0c37e64fbf47d017feac3145cdbb58836d7eee8c6f6d3b6880c5456227d2"},
]
[package.dependencies]
setuptools = "*"
[[package]]
name = "packaging"
version = "24.0"
@ -563,20 +848,150 @@ files = [
{file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"},
]
[[package]]
name = "platformdirs"
version = "4.2.2"
description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`."
optional = false
python-versions = ">=3.8"
files = [
{file = "platformdirs-4.2.2-py3-none-any.whl", hash = "sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee"},
{file = "platformdirs-4.2.2.tar.gz", hash = "sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3"},
]
[package.extras]
docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"]
test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"]
type = ["mypy (>=1.8)"]
[[package]]
name = "pre-commit"
version = "3.7.1"
description = "A framework for managing and maintaining multi-language pre-commit hooks."
optional = false
python-versions = ">=3.9"
files = [
{file = "pre_commit-3.7.1-py2.py3-none-any.whl", hash = "sha256:fae36fd1d7ad7d6a5a1c0b0d5adb2ed1a3bda5a21bf6c3e5372073d7a11cd4c5"},
{file = "pre_commit-3.7.1.tar.gz", hash = "sha256:8ca3ad567bc78a4972a3f1a477e94a79d4597e8140a6e0b651c5e33899c3654a"},
]
[package.dependencies]
cfgv = ">=2.0.0"
identify = ">=1.0.0"
nodeenv = ">=0.11.1"
pyyaml = ">=5.1"
virtualenv = ">=20.10.0"
[[package]]
name = "psycopg"
version = "3.1.19"
description = "PostgreSQL database adapter for Python"
optional = false
python-versions = ">=3.7"
files = [
{file = "psycopg-3.1.19-py3-none-any.whl", hash = "sha256:dca5e5521c859f6606686432ae1c94e8766d29cc91f2ee595378c510cc5b0731"},
{file = "psycopg-3.1.19.tar.gz", hash = "sha256:92d7b78ad82426cdcf1a0440678209faa890c6e1721361c2f8901f0dccd62961"},
]
[package.dependencies]
psycopg-binary = {version = "3.1.19", optional = true, markers = "implementation_name != \"pypy\" and extra == \"binary\""}
typing-extensions = ">=4.1"
tzdata = {version = "*", markers = "sys_platform == \"win32\""}
[package.extras]
binary = ["psycopg-binary (==3.1.19)"]
c = ["psycopg-c (==3.1.19)"]
dev = ["black (>=24.1.0)", "codespell (>=2.2)", "dnspython (>=2.1)", "flake8 (>=4.0)", "mypy (>=1.4.1)", "types-setuptools (>=57.4)", "wheel (>=0.37)"]
docs = ["Sphinx (>=5.0)", "furo (==2022.6.21)", "sphinx-autobuild (>=2021.3.14)", "sphinx-autodoc-typehints (>=1.12)"]
pool = ["psycopg-pool"]
test = ["anyio (>=3.6.2,<4.0)", "mypy (>=1.4.1)", "pproxy (>=2.7)", "pytest (>=6.2.5)", "pytest-cov (>=3.0)", "pytest-randomly (>=3.5)"]
[[package]]
name = "psycopg-binary"
version = "3.1.19"
description = "PostgreSQL database adapter for Python -- C optimisation distribution"
optional = false
python-versions = ">=3.7"
files = [
{file = "psycopg_binary-3.1.19-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7204818f05151dd08f8f851defb01972ec9d2cc925608eb0de232563f203f354"},
{file = "psycopg_binary-3.1.19-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6d4e67fd86758dbeac85641419a54f84d74495a8683b58ad5dfad08b7fc37a8f"},
{file = "psycopg_binary-3.1.19-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e12173e34b176e93ad2da913de30f774d5119c2d4d4640c6858d2d77dfa6c9bf"},
{file = "psycopg_binary-3.1.19-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:052f5193304066318853b4b2e248f523c8f52b371fc4e95d4ef63baee3f30955"},
{file = "psycopg_binary-3.1.19-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:29008f3f8977f600b8a7fb07c2e041b01645b08121760609cc45e861a0364dc9"},
{file = "psycopg_binary-3.1.19-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7c6a9a651a08d876303ed059c9553df18b3c13c3406584a70a8f37f1a1fe2709"},
{file = "psycopg_binary-3.1.19-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:91a645e6468c4f064b7f4f3b81074bdd68fe5aa2b8c5107de15dcd85ba6141be"},
{file = "psycopg_binary-3.1.19-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5c6956808fd5cf0576de5a602243af8e04594b25b9a28675feddc71c5526410a"},
{file = "psycopg_binary-3.1.19-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:1622ca27d5a7a98f7d8f35e8b146dc7efda4a4b6241d2edf7e076bd6bcecbeb4"},
{file = "psycopg_binary-3.1.19-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a100482950a55228f648bd382bb71bfaff520002f29845274fccbbf02e28bd52"},
{file = "psycopg_binary-3.1.19-cp310-cp310-win_amd64.whl", hash = "sha256:955ca8905c0251fc4af7ce0a20999e824a25652f53a558ab548b60969f1f368e"},
{file = "psycopg_binary-3.1.19-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1cf49e91dcf699b8a449944ed898ef1466b39b92720613838791a551bc8f587a"},
{file = "psycopg_binary-3.1.19-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:964c307e400c5f33fa762ba1e19853e048814fcfbd9679cc923431adb7a2ead2"},
{file = "psycopg_binary-3.1.19-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3433924e1b14074798331dc2bfae2af452ed7888067f2fc145835704d8981b15"},
{file = "psycopg_binary-3.1.19-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:00879d4c6be4b3afc510073f48a5e960f797200e261ab3d9bd9b7746a08c669d"},
{file = "psycopg_binary-3.1.19-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:34a6997c80f86d3dd80a4f078bb3b200079c47eeda4fd409d8899b883c90d2ac"},
{file = "psycopg_binary-3.1.19-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0106e42b481677c41caa69474fe530f786dcef88b11b70000f0e45a03534bc8f"},
{file = "psycopg_binary-3.1.19-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:81efe09ba27533e35709905c3061db4dc9fb814f637360578d065e2061fbb116"},
{file = "psycopg_binary-3.1.19-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:d312d6dddc18d9c164e1893706269c293cba1923118349d375962b1188dafb01"},
{file = "psycopg_binary-3.1.19-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:bfd2c734da9950f7afaad5f132088e0e1478f32f042881fca6651bb0c8d14206"},
{file = "psycopg_binary-3.1.19-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8a732610a5a6b4f06dadcf9288688a8ff202fd556d971436a123b7adb85596e2"},
{file = "psycopg_binary-3.1.19-cp311-cp311-win_amd64.whl", hash = "sha256:321814a9a3ad785855a821b842aba08ca1b7de7dfb2979a2f0492dca9ec4ae70"},
{file = "psycopg_binary-3.1.19-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:4aa0ca13bb8a725bb6d12c13999217fd5bc8b86a12589f28a74b93e076fbb959"},
{file = "psycopg_binary-3.1.19-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:469424e354ebcec949aa6aa30e5a9edc352a899d9a68ad7a48f97df83cc914cf"},
{file = "psycopg_binary-3.1.19-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b04f5349313529ae1f1c42fe1aa0443faaf50fdf12d13866c2cc49683bfa53d0"},
{file = "psycopg_binary-3.1.19-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:959feabddc7fffac89b054d6f23f3b3c62d7d3c90cd414a02e3747495597f150"},
{file = "psycopg_binary-3.1.19-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e9da624a6ca4bc5f7fa1f03f8485446b5b81d5787b6beea2b4f8d9dbef878ad7"},
{file = "psycopg_binary-3.1.19-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c1823221a6b96e38b15686170d4fc5b36073efcb87cce7d3da660440b50077f6"},
{file = "psycopg_binary-3.1.19-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:866db42f986298f0cf15d805225eb8df2228bf19f7997d7f1cb5f388cbfc6a0f"},
{file = "psycopg_binary-3.1.19-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:738c34657305b5973af6dbb6711b07b179dfdd21196d60039ca30a74bafe9648"},
{file = "psycopg_binary-3.1.19-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:fb9758473200384a04374d0e0cac6f451218ff6945a024f65a1526802c34e56e"},
{file = "psycopg_binary-3.1.19-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:0e991632777e217953ac960726158987da684086dd813ac85038c595e7382c91"},
{file = "psycopg_binary-3.1.19-cp312-cp312-win_amd64.whl", hash = "sha256:1d87484dd42c8783c44a30400949efb3d81ef2487eaa7d64d1c54df90cf8b97a"},
{file = "psycopg_binary-3.1.19-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d1d1723d7449c12bb61aca7eb6e0c6ab2863cd8dc0019273cc4d4a1982f84bdb"},
{file = "psycopg_binary-3.1.19-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e538a8671005641fa195eab962f85cf0504defbd3b548c4c8fc27102a59f687b"},
{file = "psycopg_binary-3.1.19-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c50592bc8517092f40979e4a5d934f96a1737a77724bb1d121eb78b614b30fc8"},
{file = "psycopg_binary-3.1.19-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:95f16ae82bc242b76cd3c3e5156441e2bd85ff9ec3a9869d750aad443e46073c"},
{file = "psycopg_binary-3.1.19-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aebd1e98e865e9a28ce0cb2c25b7dfd752f0d1f0a423165b55cd32a431dcc0f4"},
{file = "psycopg_binary-3.1.19-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:49cd7af7d49e438a39593d1dd8cab106a1912536c2b78a4d814ebdff2786094e"},
{file = "psycopg_binary-3.1.19-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:affebd61aa3b7a8880fd4ac3ee94722940125ff83ff485e1a7c76be9adaabb38"},
{file = "psycopg_binary-3.1.19-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:d1bac282f140fa092f2bbb6c36ed82270b4a21a6fc55d4b16748ed9f55e50fdb"},
{file = "psycopg_binary-3.1.19-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:1285aa54449e362b1d30d92b2dc042ad3ee80f479cc4e323448d0a0a8a1641fa"},
{file = "psycopg_binary-3.1.19-cp37-cp37m-win_amd64.whl", hash = "sha256:6cff31af8155dc9ee364098a328bab688c887c732c66b8d027e5b03818ca0287"},
{file = "psycopg_binary-3.1.19-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d9b689c4a17dd3130791dcbb8c30dbf05602f7c2d56c792e193fb49adc7bf5f8"},
{file = "psycopg_binary-3.1.19-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:017518bd2de4851adc826a224fb105411e148ad845e11355edd6786ba3dfedf5"},
{file = "psycopg_binary-3.1.19-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c35fd811f339a3cbe7f9b54b2d9a5e592e57426c6cc1051632a62c59c4810208"},
{file = "psycopg_binary-3.1.19-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:38ed45ec9673709bfa5bc17f140e71dd4cca56d4e58ef7fd50d5a5043a4f55c6"},
{file = "psycopg_binary-3.1.19-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:433f1c256108f9e26f480a8cd6ddb0fb37dbc87d7f5a97e4540a9da9b881f23f"},
{file = "psycopg_binary-3.1.19-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ed61e43bf5dc8d0936daf03a19fef3168d64191dbe66483f7ad08c4cea0bc36b"},
{file = "psycopg_binary-3.1.19-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4ae8109ff9fdf1fa0cb87ab6645298693fdd2666a7f5f85660df88f6965e0bb7"},
{file = "psycopg_binary-3.1.19-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:a53809ee02e3952fae7977c19b30fd828bd117b8f5edf17a3a94212feb57faaf"},
{file = "psycopg_binary-3.1.19-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:9d39d5ffc151fb33bcd55b99b0e8957299c0b1b3e5a1a5f4399c1287ef0051a9"},
{file = "psycopg_binary-3.1.19-cp38-cp38-win_amd64.whl", hash = "sha256:e14bc8250000921fcccd53722f86b3b3d1b57db901e206e49e2ab2afc5919c2d"},
{file = "psycopg_binary-3.1.19-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:cd88c5cea4efe614d5004fb5f5dcdea3d7d59422be796689e779e03363102d24"},
{file = "psycopg_binary-3.1.19-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:621a814e60825162d38760c66351b4df679fd422c848b7c2f86ad399bff27145"},
{file = "psycopg_binary-3.1.19-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:46e50c05952b59a214e27d3606f6d510aaa429daed898e16b8a37bfbacc81acc"},
{file = "psycopg_binary-3.1.19-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:03354a9db667c27946e70162cb0042c3929154167f3678a30d23cebfe0ad55b5"},
{file = "psycopg_binary-3.1.19-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:703c2f3b79037581afec7baa2bdbcb0a1787f1758744a7662099b0eca2d721cb"},
{file = "psycopg_binary-3.1.19-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:6469ebd9e93327e9f5f36dcf8692fb1e7aeaf70087c1c15d4f2c020e0be3a891"},
{file = "psycopg_binary-3.1.19-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:85bca9765c04b6be90cb46e7566ffe0faa2d7480ff5c8d5e055ac427f039fd24"},
{file = "psycopg_binary-3.1.19-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:a836610d5c75e9cff98b9fdb3559c007c785c09eaa84a60d5d10ef6f85f671e8"},
{file = "psycopg_binary-3.1.19-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ef8de7a1d9fb3518cc6b58e3c80b75a824209ad52b90c542686c912db8553dad"},
{file = "psycopg_binary-3.1.19-cp39-cp39-win_amd64.whl", hash = "sha256:76fcd33342f38e35cd6b5408f1bc117d55ab8b16e5019d99b6d3ce0356c51717"},
]
[[package]]
name = "pydantic"
version = "2.6.4"
version = "2.7.1"
description = "Data validation using Python type hints"
optional = false
python-versions = ">=3.8"
files = [
{file = "pydantic-2.6.4-py3-none-any.whl", hash = "sha256:cc46fce86607580867bdc3361ad462bab9c222ef042d3da86f2fb333e1d916c5"},
{file = "pydantic-2.6.4.tar.gz", hash = "sha256:b1704e0847db01817624a6b86766967f552dd9dbf3afba4004409f908dcc84e6"},
{file = "pydantic-2.7.1-py3-none-any.whl", hash = "sha256:e029badca45266732a9a79898a15ae2e8b14840b1eabbb25844be28f0b33f3d5"},
{file = "pydantic-2.7.1.tar.gz", hash = "sha256:e9dbb5eada8abe4d9ae5f46b9939aead650cd2b68f249bb3a8139dbe125803cc"},
]
[package.dependencies]
annotated-types = ">=0.4.0"
pydantic-core = "2.16.3"
pydantic-core = "2.18.2"
typing-extensions = ">=4.6.1"
[package.extras]
@ -584,90 +999,90 @@ email = ["email-validator (>=2.0.0)"]
[[package]]
name = "pydantic-core"
version = "2.16.3"
description = ""
version = "2.18.2"
description = "Core functionality for Pydantic validation and serialization"
optional = false
python-versions = ">=3.8"
files = [
{file = "pydantic_core-2.16.3-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:75b81e678d1c1ede0785c7f46690621e4c6e63ccd9192af1f0bd9d504bbb6bf4"},
{file = "pydantic_core-2.16.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9c865a7ee6f93783bd5d781af5a4c43dadc37053a5b42f7d18dc019f8c9d2bd1"},
{file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:162e498303d2b1c036b957a1278fa0899d02b2842f1ff901b6395104c5554a45"},
{file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2f583bd01bbfbff4eaee0868e6fc607efdfcc2b03c1c766b06a707abbc856187"},
{file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b926dd38db1519ed3043a4de50214e0d600d404099c3392f098a7f9d75029ff8"},
{file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:716b542728d4c742353448765aa7cdaa519a7b82f9564130e2b3f6766018c9ec"},
{file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc4ad7f7ee1a13d9cb49d8198cd7d7e3aa93e425f371a68235f784e99741561f"},
{file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bd87f48924f360e5d1c5f770d6155ce0e7d83f7b4e10c2f9ec001c73cf475c99"},
{file = "pydantic_core-2.16.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0df446663464884297c793874573549229f9eca73b59360878f382a0fc085979"},
{file = "pydantic_core-2.16.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4df8a199d9f6afc5ae9a65f8f95ee52cae389a8c6b20163762bde0426275b7db"},
{file = "pydantic_core-2.16.3-cp310-none-win32.whl", hash = "sha256:456855f57b413f077dff513a5a28ed838dbbb15082ba00f80750377eed23d132"},
{file = "pydantic_core-2.16.3-cp310-none-win_amd64.whl", hash = "sha256:732da3243e1b8d3eab8c6ae23ae6a58548849d2e4a4e03a1924c8ddf71a387cb"},
{file = "pydantic_core-2.16.3-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:519ae0312616026bf4cedc0fe459e982734f3ca82ee8c7246c19b650b60a5ee4"},
{file = "pydantic_core-2.16.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b3992a322a5617ded0a9f23fd06dbc1e4bd7cf39bc4ccf344b10f80af58beacd"},
{file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d62da299c6ecb04df729e4b5c52dc0d53f4f8430b4492b93aa8de1f541c4aac"},
{file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2acca2be4bb2f2147ada8cac612f8a98fc09f41c89f87add7256ad27332c2fda"},
{file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1b662180108c55dfbf1280d865b2d116633d436cfc0bba82323554873967b340"},
{file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e7c6ed0dc9d8e65f24f5824291550139fe6f37fac03788d4580da0d33bc00c97"},
{file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a6b1bb0827f56654b4437955555dc3aeeebeddc47c2d7ed575477f082622c49e"},
{file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e56f8186d6210ac7ece503193ec84104da7ceb98f68ce18c07282fcc2452e76f"},
{file = "pydantic_core-2.16.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:936e5db01dd49476fa8f4383c259b8b1303d5dd5fb34c97de194560698cc2c5e"},
{file = "pydantic_core-2.16.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:33809aebac276089b78db106ee692bdc9044710e26f24a9a2eaa35a0f9fa70ba"},
{file = "pydantic_core-2.16.3-cp311-none-win32.whl", hash = "sha256:ded1c35f15c9dea16ead9bffcde9bb5c7c031bff076355dc58dcb1cb436c4721"},
{file = "pydantic_core-2.16.3-cp311-none-win_amd64.whl", hash = "sha256:d89ca19cdd0dd5f31606a9329e309d4fcbb3df860960acec32630297d61820df"},
{file = "pydantic_core-2.16.3-cp311-none-win_arm64.whl", hash = "sha256:6162f8d2dc27ba21027f261e4fa26f8bcb3cf9784b7f9499466a311ac284b5b9"},
{file = "pydantic_core-2.16.3-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:0f56ae86b60ea987ae8bcd6654a887238fd53d1384f9b222ac457070b7ac4cff"},
{file = "pydantic_core-2.16.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c9bd22a2a639e26171068f8ebb5400ce2c1bc7d17959f60a3b753ae13c632975"},
{file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4204e773b4b408062960e65468d5346bdfe139247ee5f1ca2a378983e11388a2"},
{file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f651dd19363c632f4abe3480a7c87a9773be27cfe1341aef06e8759599454120"},
{file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aaf09e615a0bf98d406657e0008e4a8701b11481840be7d31755dc9f97c44053"},
{file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8e47755d8152c1ab5b55928ab422a76e2e7b22b5ed8e90a7d584268dd49e9c6b"},
{file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:500960cb3a0543a724a81ba859da816e8cf01b0e6aaeedf2c3775d12ee49cade"},
{file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cf6204fe865da605285c34cf1172879d0314ff267b1c35ff59de7154f35fdc2e"},
{file = "pydantic_core-2.16.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d33dd21f572545649f90c38c227cc8631268ba25c460b5569abebdd0ec5974ca"},
{file = "pydantic_core-2.16.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:49d5d58abd4b83fb8ce763be7794d09b2f50f10aa65c0f0c1696c677edeb7cbf"},
{file = "pydantic_core-2.16.3-cp312-none-win32.whl", hash = "sha256:f53aace168a2a10582e570b7736cc5bef12cae9cf21775e3eafac597e8551fbe"},
{file = "pydantic_core-2.16.3-cp312-none-win_amd64.whl", hash = "sha256:0d32576b1de5a30d9a97f300cc6a3f4694c428d956adbc7e6e2f9cad279e45ed"},
{file = "pydantic_core-2.16.3-cp312-none-win_arm64.whl", hash = "sha256:ec08be75bb268473677edb83ba71e7e74b43c008e4a7b1907c6d57e940bf34b6"},
{file = "pydantic_core-2.16.3-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:b1f6f5938d63c6139860f044e2538baeee6f0b251a1816e7adb6cbce106a1f01"},
{file = "pydantic_core-2.16.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2a1ef6a36fdbf71538142ed604ad19b82f67b05749512e47f247a6ddd06afdc7"},
{file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:704d35ecc7e9c31d48926150afada60401c55efa3b46cd1ded5a01bdffaf1d48"},
{file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d937653a696465677ed583124b94a4b2d79f5e30b2c46115a68e482c6a591c8a"},
{file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c9803edf8e29bd825f43481f19c37f50d2b01899448273b3a7758441b512acf8"},
{file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:72282ad4892a9fb2da25defeac8c2e84352c108705c972db82ab121d15f14e6d"},
{file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f752826b5b8361193df55afcdf8ca6a57d0232653494ba473630a83ba50d8c9"},
{file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4384a8f68ddb31a0b0c3deae88765f5868a1b9148939c3f4121233314ad5532c"},
{file = "pydantic_core-2.16.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:a4b2bf78342c40b3dc830880106f54328928ff03e357935ad26c7128bbd66ce8"},
{file = "pydantic_core-2.16.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:13dcc4802961b5f843a9385fc821a0b0135e8c07fc3d9949fd49627c1a5e6ae5"},
{file = "pydantic_core-2.16.3-cp38-none-win32.whl", hash = "sha256:e3e70c94a0c3841e6aa831edab1619ad5c511199be94d0c11ba75fe06efe107a"},
{file = "pydantic_core-2.16.3-cp38-none-win_amd64.whl", hash = "sha256:ecdf6bf5f578615f2e985a5e1f6572e23aa632c4bd1dc67f8f406d445ac115ed"},
{file = "pydantic_core-2.16.3-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:bda1ee3e08252b8d41fa5537413ffdddd58fa73107171a126d3b9ff001b9b820"},
{file = "pydantic_core-2.16.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:21b888c973e4f26b7a96491c0965a8a312e13be108022ee510248fe379a5fa23"},
{file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be0ec334369316fa73448cc8c982c01e5d2a81c95969d58b8f6e272884df0074"},
{file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b5b6079cc452a7c53dd378c6f881ac528246b3ac9aae0f8eef98498a75657805"},
{file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ee8d5f878dccb6d499ba4d30d757111847b6849ae07acdd1205fffa1fc1253c"},
{file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7233d65d9d651242a68801159763d09e9ec96e8a158dbf118dc090cd77a104c9"},
{file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c6119dc90483a5cb50a1306adb8d52c66e447da88ea44f323e0ae1a5fcb14256"},
{file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:578114bc803a4c1ff9946d977c221e4376620a46cf78da267d946397dc9514a8"},
{file = "pydantic_core-2.16.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d8f99b147ff3fcf6b3cc60cb0c39ea443884d5559a30b1481e92495f2310ff2b"},
{file = "pydantic_core-2.16.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4ac6b4ce1e7283d715c4b729d8f9dab9627586dafce81d9eaa009dd7f25dd972"},
{file = "pydantic_core-2.16.3-cp39-none-win32.whl", hash = "sha256:e7774b570e61cb998490c5235740d475413a1f6de823169b4cf94e2fe9e9f6b2"},
{file = "pydantic_core-2.16.3-cp39-none-win_amd64.whl", hash = "sha256:9091632a25b8b87b9a605ec0e61f241c456e9248bfdcf7abdf344fdb169c81cf"},
{file = "pydantic_core-2.16.3-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:36fa178aacbc277bc6b62a2c3da95226520da4f4e9e206fdf076484363895d2c"},
{file = "pydantic_core-2.16.3-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:dcca5d2bf65c6fb591fff92da03f94cd4f315972f97c21975398bd4bd046854a"},
{file = "pydantic_core-2.16.3-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a72fb9963cba4cd5793854fd12f4cfee731e86df140f59ff52a49b3552db241"},
{file = "pydantic_core-2.16.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b60cc1a081f80a2105a59385b92d82278b15d80ebb3adb200542ae165cd7d183"},
{file = "pydantic_core-2.16.3-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cbcc558401de90a746d02ef330c528f2e668c83350f045833543cd57ecead1ad"},
{file = "pydantic_core-2.16.3-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:fee427241c2d9fb7192b658190f9f5fd6dfe41e02f3c1489d2ec1e6a5ab1e04a"},
{file = "pydantic_core-2.16.3-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f4cb85f693044e0f71f394ff76c98ddc1bc0953e48c061725e540396d5c8a2e1"},
{file = "pydantic_core-2.16.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:b29eeb887aa931c2fcef5aa515d9d176d25006794610c264ddc114c053bf96fe"},
{file = "pydantic_core-2.16.3-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a425479ee40ff021f8216c9d07a6a3b54b31c8267c6e17aa88b70d7ebd0e5e5b"},
{file = "pydantic_core-2.16.3-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:5c5cbc703168d1b7a838668998308018a2718c2130595e8e190220238addc96f"},
{file = "pydantic_core-2.16.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99b6add4c0b39a513d323d3b93bc173dac663c27b99860dd5bf491b240d26137"},
{file = "pydantic_core-2.16.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75f76ee558751746d6a38f89d60b6228fa174e5172d143886af0f85aa306fd89"},
{file = "pydantic_core-2.16.3-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:00ee1c97b5364b84cb0bd82e9bbf645d5e2871fb8c58059d158412fee2d33d8a"},
{file = "pydantic_core-2.16.3-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:287073c66748f624be4cef893ef9174e3eb88fe0b8a78dc22e88eca4bc357ca6"},
{file = "pydantic_core-2.16.3-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ed25e1835c00a332cb10c683cd39da96a719ab1dfc08427d476bce41b92531fc"},
{file = "pydantic_core-2.16.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:86b3d0033580bd6bbe07590152007275bd7af95f98eaa5bd36f3da219dcd93da"},
{file = "pydantic_core-2.16.3.tar.gz", hash = "sha256:1cac689f80a3abab2d3c0048b29eea5751114054f032a941a32de4c852c59cad"},
{file = "pydantic_core-2.18.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:9e08e867b306f525802df7cd16c44ff5ebbe747ff0ca6cf3fde7f36c05a59a81"},
{file = "pydantic_core-2.18.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f0a21cbaa69900cbe1a2e7cad2aa74ac3cf21b10c3efb0fa0b80305274c0e8a2"},
{file = "pydantic_core-2.18.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0680b1f1f11fda801397de52c36ce38ef1c1dc841a0927a94f226dea29c3ae3d"},
{file = "pydantic_core-2.18.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:95b9d5e72481d3780ba3442eac863eae92ae43a5f3adb5b4d0a1de89d42bb250"},
{file = "pydantic_core-2.18.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c4fcf5cd9c4b655ad666ca332b9a081112cd7a58a8b5a6ca7a3104bc950f2038"},
{file = "pydantic_core-2.18.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b5155ff768083cb1d62f3e143b49a8a3432e6789a3abee8acd005c3c7af1c74"},
{file = "pydantic_core-2.18.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:553ef617b6836fc7e4df130bb851e32fe357ce36336d897fd6646d6058d980af"},
{file = "pydantic_core-2.18.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b89ed9eb7d616ef5714e5590e6cf7f23b02d0d539767d33561e3675d6f9e3857"},
{file = "pydantic_core-2.18.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:75f7e9488238e920ab6204399ded280dc4c307d034f3924cd7f90a38b1829563"},
{file = "pydantic_core-2.18.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ef26c9e94a8c04a1b2924149a9cb081836913818e55681722d7f29af88fe7b38"},
{file = "pydantic_core-2.18.2-cp310-none-win32.whl", hash = "sha256:182245ff6b0039e82b6bb585ed55a64d7c81c560715d1bad0cbad6dfa07b4027"},
{file = "pydantic_core-2.18.2-cp310-none-win_amd64.whl", hash = "sha256:e23ec367a948b6d812301afc1b13f8094ab7b2c280af66ef450efc357d2ae543"},
{file = "pydantic_core-2.18.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:219da3f096d50a157f33645a1cf31c0ad1fe829a92181dd1311022f986e5fbe3"},
{file = "pydantic_core-2.18.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:cc1cfd88a64e012b74e94cd00bbe0f9c6df57049c97f02bb07d39e9c852e19a4"},
{file = "pydantic_core-2.18.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:05b7133a6e6aeb8df37d6f413f7705a37ab4031597f64ab56384c94d98fa0e90"},
{file = "pydantic_core-2.18.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:224c421235f6102e8737032483f43c1a8cfb1d2f45740c44166219599358c2cd"},
{file = "pydantic_core-2.18.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b14d82cdb934e99dda6d9d60dc84a24379820176cc4a0d123f88df319ae9c150"},
{file = "pydantic_core-2.18.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2728b01246a3bba6de144f9e3115b532ee44bd6cf39795194fb75491824a1413"},
{file = "pydantic_core-2.18.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:470b94480bb5ee929f5acba6995251ada5e059a5ef3e0dfc63cca287283ebfa6"},
{file = "pydantic_core-2.18.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:997abc4df705d1295a42f95b4eec4950a37ad8ae46d913caeee117b6b198811c"},
{file = "pydantic_core-2.18.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:75250dbc5290e3f1a0f4618db35e51a165186f9034eff158f3d490b3fed9f8a0"},
{file = "pydantic_core-2.18.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4456f2dca97c425231d7315737d45239b2b51a50dc2b6f0c2bb181fce6207664"},
{file = "pydantic_core-2.18.2-cp311-none-win32.whl", hash = "sha256:269322dcc3d8bdb69f054681edff86276b2ff972447863cf34c8b860f5188e2e"},
{file = "pydantic_core-2.18.2-cp311-none-win_amd64.whl", hash = "sha256:800d60565aec896f25bc3cfa56d2277d52d5182af08162f7954f938c06dc4ee3"},
{file = "pydantic_core-2.18.2-cp311-none-win_arm64.whl", hash = "sha256:1404c69d6a676245199767ba4f633cce5f4ad4181f9d0ccb0577e1f66cf4c46d"},
{file = "pydantic_core-2.18.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:fb2bd7be70c0fe4dfd32c951bc813d9fe6ebcbfdd15a07527796c8204bd36242"},
{file = "pydantic_core-2.18.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6132dd3bd52838acddca05a72aafb6eab6536aa145e923bb50f45e78b7251043"},
{file = "pydantic_core-2.18.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7d904828195733c183d20a54230c0df0eb46ec746ea1a666730787353e87182"},
{file = "pydantic_core-2.18.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c9bd70772c720142be1020eac55f8143a34ec9f82d75a8e7a07852023e46617f"},
{file = "pydantic_core-2.18.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2b8ed04b3582771764538f7ee7001b02e1170223cf9b75dff0bc698fadb00cf3"},
{file = "pydantic_core-2.18.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e6dac87ddb34aaec85f873d737e9d06a3555a1cc1a8e0c44b7f8d5daeb89d86f"},
{file = "pydantic_core-2.18.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ca4ae5a27ad7a4ee5170aebce1574b375de390bc01284f87b18d43a3984df72"},
{file = "pydantic_core-2.18.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:886eec03591b7cf058467a70a87733b35f44707bd86cf64a615584fd72488b7c"},
{file = "pydantic_core-2.18.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ca7b0c1f1c983e064caa85f3792dd2fe3526b3505378874afa84baf662e12241"},
{file = "pydantic_core-2.18.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4b4356d3538c3649337df4074e81b85f0616b79731fe22dd11b99499b2ebbdf3"},
{file = "pydantic_core-2.18.2-cp312-none-win32.whl", hash = "sha256:8b172601454f2d7701121bbec3425dd71efcb787a027edf49724c9cefc14c038"},
{file = "pydantic_core-2.18.2-cp312-none-win_amd64.whl", hash = "sha256:b1bd7e47b1558ea872bd16c8502c414f9e90dcf12f1395129d7bb42a09a95438"},
{file = "pydantic_core-2.18.2-cp312-none-win_arm64.whl", hash = "sha256:98758d627ff397e752bc339272c14c98199c613f922d4a384ddc07526c86a2ec"},
{file = "pydantic_core-2.18.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:9fdad8e35f278b2c3eb77cbdc5c0a49dada440657bf738d6905ce106dc1de439"},
{file = "pydantic_core-2.18.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:1d90c3265ae107f91a4f279f4d6f6f1d4907ac76c6868b27dc7fb33688cfb347"},
{file = "pydantic_core-2.18.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:390193c770399861d8df9670fb0d1874f330c79caaca4642332df7c682bf6b91"},
{file = "pydantic_core-2.18.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:82d5d4d78e4448683cb467897fe24e2b74bb7b973a541ea1dcfec1d3cbce39fb"},
{file = "pydantic_core-2.18.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4774f3184d2ef3e14e8693194f661dea5a4d6ca4e3dc8e39786d33a94865cefd"},
{file = "pydantic_core-2.18.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d4d938ec0adf5167cb335acb25a4ee69a8107e4984f8fbd2e897021d9e4ca21b"},
{file = "pydantic_core-2.18.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e0e8b1be28239fc64a88a8189d1df7fad8be8c1ae47fcc33e43d4be15f99cc70"},
{file = "pydantic_core-2.18.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:868649da93e5a3d5eacc2b5b3b9235c98ccdbfd443832f31e075f54419e1b96b"},
{file = "pydantic_core-2.18.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:78363590ef93d5d226ba21a90a03ea89a20738ee5b7da83d771d283fd8a56761"},
{file = "pydantic_core-2.18.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:852e966fbd035a6468fc0a3496589b45e2208ec7ca95c26470a54daed82a0788"},
{file = "pydantic_core-2.18.2-cp38-none-win32.whl", hash = "sha256:6a46e22a707e7ad4484ac9ee9f290f9d501df45954184e23fc29408dfad61350"},
{file = "pydantic_core-2.18.2-cp38-none-win_amd64.whl", hash = "sha256:d91cb5ea8b11607cc757675051f61b3d93f15eca3cefb3e6c704a5d6e8440f4e"},
{file = "pydantic_core-2.18.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:ae0a8a797a5e56c053610fa7be147993fe50960fa43609ff2a9552b0e07013e8"},
{file = "pydantic_core-2.18.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:042473b6280246b1dbf530559246f6842b56119c2926d1e52b631bdc46075f2a"},
{file = "pydantic_core-2.18.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a388a77e629b9ec814c1b1e6b3b595fe521d2cdc625fcca26fbc2d44c816804"},
{file = "pydantic_core-2.18.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e25add29b8f3b233ae90ccef2d902d0ae0432eb0d45370fe315d1a5cf231004b"},
{file = "pydantic_core-2.18.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f459a5ce8434614dfd39bbebf1041952ae01da6bed9855008cb33b875cb024c0"},
{file = "pydantic_core-2.18.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eff2de745698eb46eeb51193a9f41d67d834d50e424aef27df2fcdee1b153845"},
{file = "pydantic_core-2.18.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8309f67285bdfe65c372ea3722b7a5642680f3dba538566340a9d36e920b5f0"},
{file = "pydantic_core-2.18.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f93a8a2e3938ff656a7c1bc57193b1319960ac015b6e87d76c76bf14fe0244b4"},
{file = "pydantic_core-2.18.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:22057013c8c1e272eb8d0eebc796701167d8377441ec894a8fed1af64a0bf399"},
{file = "pydantic_core-2.18.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:cfeecd1ac6cc1fb2692c3d5110781c965aabd4ec5d32799773ca7b1456ac636b"},
{file = "pydantic_core-2.18.2-cp39-none-win32.whl", hash = "sha256:0d69b4c2f6bb3e130dba60d34c0845ba31b69babdd3f78f7c0c8fae5021a253e"},
{file = "pydantic_core-2.18.2-cp39-none-win_amd64.whl", hash = "sha256:d9319e499827271b09b4e411905b24a426b8fb69464dfa1696258f53a3334641"},
{file = "pydantic_core-2.18.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a1874c6dd4113308bd0eb568418e6114b252afe44319ead2b4081e9b9521fe75"},
{file = "pydantic_core-2.18.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:ccdd111c03bfd3666bd2472b674c6899550e09e9f298954cfc896ab92b5b0e6d"},
{file = "pydantic_core-2.18.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e18609ceaa6eed63753037fc06ebb16041d17d28199ae5aba0052c51449650a9"},
{file = "pydantic_core-2.18.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e5c584d357c4e2baf0ff7baf44f4994be121e16a2c88918a5817331fc7599d7"},
{file = "pydantic_core-2.18.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:43f0f463cf89ace478de71a318b1b4f05ebc456a9b9300d027b4b57c1a2064fb"},
{file = "pydantic_core-2.18.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:e1b395e58b10b73b07b7cf740d728dd4ff9365ac46c18751bf8b3d8cca8f625a"},
{file = "pydantic_core-2.18.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:0098300eebb1c837271d3d1a2cd2911e7c11b396eac9661655ee524a7f10587b"},
{file = "pydantic_core-2.18.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:36789b70d613fbac0a25bb07ab3d9dba4d2e38af609c020cf4d888d165ee0bf3"},
{file = "pydantic_core-2.18.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3f9a801e7c8f1ef8718da265bba008fa121243dfe37c1cea17840b0944dfd72c"},
{file = "pydantic_core-2.18.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:3a6515ebc6e69d85502b4951d89131ca4e036078ea35533bb76327f8424531ce"},
{file = "pydantic_core-2.18.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20aca1e2298c56ececfd8ed159ae4dde2df0781988c97ef77d5c16ff4bd5b400"},
{file = "pydantic_core-2.18.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:223ee893d77a310a0391dca6df00f70bbc2f36a71a895cecd9a0e762dc37b349"},
{file = "pydantic_core-2.18.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2334ce8c673ee93a1d6a65bd90327588387ba073c17e61bf19b4fd97d688d63c"},
{file = "pydantic_core-2.18.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:cbca948f2d14b09d20268cda7b0367723d79063f26c4ffc523af9042cad95592"},
{file = "pydantic_core-2.18.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:b3ef08e20ec49e02d5c6717a91bb5af9b20f1805583cb0adfe9ba2c6b505b5ae"},
{file = "pydantic_core-2.18.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:c6fdc8627910eed0c01aed6a390a252fe3ea6d472ee70fdde56273f198938374"},
{file = "pydantic_core-2.18.2.tar.gz", hash = "sha256:2e29d20810dfc3043ee13ac7d9e25105799817683348823f305ab3f349b9386e"},
]
[package.dependencies]
@ -675,17 +1090,16 @@ typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0"
[[package]]
name = "pygments"
version = "2.17.2"
version = "2.18.0"
description = "Pygments is a syntax highlighting package written in Python."
optional = false
python-versions = ">=3.7"
python-versions = ">=3.8"
files = [
{file = "pygments-2.17.2-py3-none-any.whl", hash = "sha256:b27c2826c47d0f3219f29554824c30c5e8945175d888647acd804ddd04af846c"},
{file = "pygments-2.17.2.tar.gz", hash = "sha256:da46cec9fd2de5be3a8a784f434e4c4ab670b4ff54d605c4c2717e9d49c4c367"},
{file = "pygments-2.18.0-py3-none-any.whl", hash = "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a"},
{file = "pygments-2.18.0.tar.gz", hash = "sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199"},
]
[package.extras]
plugins = ["importlib-metadata"]
windows-terminal = ["colorama (>=0.4.6)"]
[[package]]
@ -787,6 +1201,51 @@ files = [
{file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"},
]
[[package]]
name = "reader"
version = "3.12"
description = "A Python feed reader library."
optional = false
python-versions = ">=3.10"
files = [
{file = "reader-3.12-py3-none-any.whl", hash = "sha256:bab08ce6733b12ab50cd376b48305eb4d74f9560ade7fdd932f0339f4c666590"},
{file = "reader-3.12.tar.gz", hash = "sha256:5ee788184c0a9ee92be4d5e6dac4a4e964d1f99853e9f85d3ccdc495891790f8"},
]
[package.dependencies]
beautifulsoup4 = ">=4.5"
feedparser = ">=6"
iso8601 = ">=1"
requests = ">=2.18"
typing-extensions = ">=4"
werkzeug = ">2"
[package.extras]
app = ["PyYAML", "flask (>=0.10)", "humanize (>=4,!=4.7.*)"]
cli = ["PyYAML", "click (>=7)"]
dev = ["build", "pre-commit", "reader[app,cli,docs,tests,unstable-plugins]", "tox", "twine"]
docs = ["click (>=7)", "setuptools", "sphinx", "sphinx-click", "sphinx-hoverxref", "sphinx-rtd-theme (>=1.3.0rc1)", "sphinxcontrib-log-cabinet"]
tests = ["coverage", "flaky", "html5lib", "lxml", "mechanicalsoup", "mypy", "numpy", "pytest (>=4)", "pytest-cov", "pytest-randomly", "pytest-subtests", "requests-mock", "requests-wsgi-adapter", "types-beautifulsoup4", "types-requests"]
unstable-plugins = ["beautifulsoup4", "blinker (>=1.4)", "mutagen", "requests", "tabulate"]
[[package]]
name = "redis"
version = "5.0.4"
description = "Python client for Redis database and key-value store"
optional = false
python-versions = ">=3.7"
files = [
{file = "redis-5.0.4-py3-none-any.whl", hash = "sha256:7adc2835c7a9b5033b7ad8f8918d09b7344188228809c98df07af226d39dec91"},
{file = "redis-5.0.4.tar.gz", hash = "sha256:ec31f2ed9675cc54c21ba854cfe0462e6faf1d83c8ce5944709db8a4700b9c61"},
]
[package.dependencies]
hiredis = {version = ">=1.0.0", optional = true, markers = "extra == \"hiredis\""}
[package.extras]
hiredis = ["hiredis (>=1.0.0)"]
ocsp = ["cryptography (>=36.0.1)", "pyopenssl (==20.0.1)", "requests (>=2.26.0)"]
[[package]]
name = "regex"
version = "2023.12.25"
@ -930,30 +1389,46 @@ jupyter = ["ipywidgets (>=7.5.1,<9)"]
[[package]]
name = "ruff"
version = "0.3.4"
version = "0.3.7"
description = "An extremely fast Python linter and code formatter, written in Rust."
optional = false
python-versions = ">=3.7"
files = [
{file = "ruff-0.3.4-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:60c870a7d46efcbc8385d27ec07fe534ac32f3b251e4fc44b3cbfd9e09609ef4"},
{file = "ruff-0.3.4-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:6fc14fa742e1d8f24910e1fff0bd5e26d395b0e0e04cc1b15c7c5e5fe5b4af91"},
{file = "ruff-0.3.4-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3ee7880f653cc03749a3bfea720cf2a192e4f884925b0cf7eecce82f0ce5854"},
{file = "ruff-0.3.4-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:cf133dd744f2470b347f602452a88e70dadfbe0fcfb5fd46e093d55da65f82f7"},
{file = "ruff-0.3.4-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3f3860057590e810c7ffea75669bdc6927bfd91e29b4baa9258fd48b540a4365"},
{file = "ruff-0.3.4-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:986f2377f7cf12efac1f515fc1a5b753c000ed1e0a6de96747cdf2da20a1b369"},
{file = "ruff-0.3.4-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c4fd98e85869603e65f554fdc5cddf0712e352fe6e61d29d5a6fe087ec82b76c"},
{file = "ruff-0.3.4-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:64abeed785dad51801b423fa51840b1764b35d6c461ea8caef9cf9e5e5ab34d9"},
{file = "ruff-0.3.4-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:df52972138318bc7546d92348a1ee58449bc3f9eaf0db278906eb511889c4b50"},
{file = "ruff-0.3.4-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:98e98300056445ba2cc27d0b325fd044dc17fcc38e4e4d2c7711585bd0a958ed"},
{file = "ruff-0.3.4-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:519cf6a0ebed244dce1dc8aecd3dc99add7a2ee15bb68cf19588bb5bf58e0488"},
{file = "ruff-0.3.4-py3-none-musllinux_1_2_i686.whl", hash = "sha256:bb0acfb921030d00070539c038cd24bb1df73a2981e9f55942514af8b17be94e"},
{file = "ruff-0.3.4-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:cf187a7e7098233d0d0c71175375c5162f880126c4c716fa28a8ac418dcf3378"},
{file = "ruff-0.3.4-py3-none-win32.whl", hash = "sha256:af27ac187c0a331e8ef91d84bf1c3c6a5dea97e912a7560ac0cef25c526a4102"},
{file = "ruff-0.3.4-py3-none-win_amd64.whl", hash = "sha256:de0d5069b165e5a32b3c6ffbb81c350b1e3d3483347196ffdf86dc0ef9e37dd6"},
{file = "ruff-0.3.4-py3-none-win_arm64.whl", hash = "sha256:6810563cc08ad0096b57c717bd78aeac888a1bfd38654d9113cb3dc4d3f74232"},
{file = "ruff-0.3.4.tar.gz", hash = "sha256:f0f4484c6541a99862b693e13a151435a279b271cff20e37101116a21e2a1ad1"},
{file = "ruff-0.3.7-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:0e8377cccb2f07abd25e84fc5b2cbe48eeb0fea9f1719cad7caedb061d70e5ce"},
{file = "ruff-0.3.7-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:15a4d1cc1e64e556fa0d67bfd388fed416b7f3b26d5d1c3e7d192c897e39ba4b"},
{file = "ruff-0.3.7-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d28bdf3d7dc71dd46929fafeec98ba89b7c3550c3f0978e36389b5631b793663"},
{file = "ruff-0.3.7-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:379b67d4f49774ba679593b232dcd90d9e10f04d96e3c8ce4a28037ae473f7bb"},
{file = "ruff-0.3.7-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c060aea8ad5ef21cdfbbe05475ab5104ce7827b639a78dd55383a6e9895b7c51"},
{file = "ruff-0.3.7-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:ebf8f615dde968272d70502c083ebf963b6781aacd3079081e03b32adfe4d58a"},
{file = "ruff-0.3.7-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d48098bd8f5c38897b03604f5428901b65e3c97d40b3952e38637b5404b739a2"},
{file = "ruff-0.3.7-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:da8a4fda219bf9024692b1bc68c9cff4b80507879ada8769dc7e985755d662ea"},
{file = "ruff-0.3.7-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c44e0149f1d8b48c4d5c33d88c677a4aa22fd09b1683d6a7ff55b816b5d074f"},
{file = "ruff-0.3.7-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:3050ec0af72b709a62ecc2aca941b9cd479a7bf2b36cc4562f0033d688e44fa1"},
{file = "ruff-0.3.7-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:a29cc38e4c1ab00da18a3f6777f8b50099d73326981bb7d182e54a9a21bb4ff7"},
{file = "ruff-0.3.7-py3-none-musllinux_1_2_i686.whl", hash = "sha256:5b15cc59c19edca917f51b1956637db47e200b0fc5e6e1878233d3a938384b0b"},
{file = "ruff-0.3.7-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:e491045781b1e38b72c91247cf4634f040f8d0cb3e6d3d64d38dcf43616650b4"},
{file = "ruff-0.3.7-py3-none-win32.whl", hash = "sha256:bc931de87593d64fad3a22e201e55ad76271f1d5bfc44e1a1887edd0903c7d9f"},
{file = "ruff-0.3.7-py3-none-win_amd64.whl", hash = "sha256:5ef0e501e1e39f35e03c2acb1d1238c595b8bb36cf7a170e7c1df1b73da00e74"},
{file = "ruff-0.3.7-py3-none-win_arm64.whl", hash = "sha256:789e144f6dc7019d1f92a812891c645274ed08af6037d11fc65fcbc183b7d59f"},
{file = "ruff-0.3.7.tar.gz", hash = "sha256:d5c1aebee5162c2226784800ae031f660c350e7a3402c4d1f8ea4e97e232e3ba"},
]
[[package]]
name = "setuptools"
version = "69.5.1"
description = "Easily download, build, install, upgrade, and uninstall Python packages"
optional = false
python-versions = ">=3.8"
files = [
{file = "setuptools-69.5.1-py3-none-any.whl", hash = "sha256:c636ac361bc47580504644275c9ad802c50415c7522212252c033bd15f301f32"},
{file = "setuptools-69.5.1.tar.gz", hash = "sha256:6c1fccdac05a97e598fb0ae3bbed5904ccb317337a51139dcd51453611bbb987"},
]
[package.extras]
docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"]
testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6,!=8.1.1)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"]
testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"]
[[package]]
name = "sgmllib3k"
version = "1.0.0"
@ -975,31 +1450,41 @@ files = [
{file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"},
]
[[package]]
name = "soupsieve"
version = "2.5"
description = "A modern CSS selector implementation for Beautiful Soup."
optional = false
python-versions = ">=3.8"
files = [
{file = "soupsieve-2.5-py3-none-any.whl", hash = "sha256:eaa337ff55a1579b6549dc679565eac1e3d000563bcb1c8ab0d0fefbc0c2cdc7"},
{file = "soupsieve-2.5.tar.gz", hash = "sha256:5663d5a7b3bfaeee0bc4372e7fc48f9cff4940b3eec54a6451cc5299f1097690"},
]
[[package]]
name = "sqlparse"
version = "0.4.4"
version = "0.5.0"
description = "A non-validating SQL parser."
optional = false
python-versions = ">=3.5"
python-versions = ">=3.8"
files = [
{file = "sqlparse-0.4.4-py3-none-any.whl", hash = "sha256:5430a4fe2ac7d0f93e66f1efc6e1338a41884b7ddf2a350cedd20ccc4d9d28f3"},
{file = "sqlparse-0.4.4.tar.gz", hash = "sha256:d446183e84b8349fa3061f0fe7f06ca94ba65b426946ffebe6e3e8295332420c"},
{file = "sqlparse-0.5.0-py3-none-any.whl", hash = "sha256:c204494cd97479d0e39f28c93d46c0b2d5959c7b9ab904762ea6c7af211c8663"},
{file = "sqlparse-0.5.0.tar.gz", hash = "sha256:714d0a4932c059d16189f58ef5411ec2287a4360f17cdd0edd2d09d4c5087c93"},
]
[package.extras]
dev = ["build", "flake8"]
dev = ["build", "hatch"]
doc = ["sphinx"]
test = ["pytest", "pytest-cov"]
[[package]]
name = "tqdm"
version = "4.66.2"
version = "4.66.4"
description = "Fast, Extensible Progress Meter"
optional = false
python-versions = ">=3.7"
files = [
{file = "tqdm-4.66.2-py3-none-any.whl", hash = "sha256:1ee4f8a893eb9bef51c6e35730cebf234d5d0b6bd112b0271e10ed7c24a02bd9"},
{file = "tqdm-4.66.2.tar.gz", hash = "sha256:6cd52cdf0fef0e0f543299cfc96fec90d7b8a7e88745f411ec33eb44d5ed3531"},
{file = "tqdm-4.66.4-py3-none-any.whl", hash = "sha256:b75ca56b413b030bc3f00af51fd2c1a1a5eac6a0c1cca83cbb37a5c52abce644"},
{file = "tqdm-4.66.4.tar.gz", hash = "sha256:e4d936c9de8727928f3be6079590e97d9abfe8d39a590be678eb5919ffc186bb"},
]
[package.dependencies]
@ -1013,13 +1498,13 @@ telegram = ["requests"]
[[package]]
name = "typing-extensions"
version = "4.10.0"
version = "4.11.0"
description = "Backported and Experimental Type Hints for Python 3.8+"
optional = false
python-versions = ">=3.8"
files = [
{file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"},
{file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"},
{file = "typing_extensions-4.11.0-py3-none-any.whl", hash = "sha256:c1f94d72897edaf4ce775bb7558d5b79d8126906a14ea5ed1635921406c0387a"},
{file = "typing_extensions-4.11.0.tar.gz", hash = "sha256:83f085bd5ca59c80295fc2a82ab5dac679cbe02b9f33f7d83af68e241bea51b0"},
]
[[package]]
@ -1067,6 +1552,43 @@ h2 = ["h2 (>=4,<5)"]
socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"]
zstd = ["zstandard (>=0.18.0)"]
[[package]]
name = "virtualenv"
version = "20.26.2"
description = "Virtual Python Environment builder"
optional = false
python-versions = ">=3.7"
files = [
{file = "virtualenv-20.26.2-py3-none-any.whl", hash = "sha256:a624db5e94f01ad993d476b9ee5346fdf7b9de43ccaee0e0197012dc838a0e9b"},
{file = "virtualenv-20.26.2.tar.gz", hash = "sha256:82bf0f4eebbb78d36ddaee0283d43fe5736b53880b8a8cdcd37390a07ac3741c"},
]
[package.dependencies]
distlib = ">=0.3.7,<1"
filelock = ">=3.12.2,<4"
platformdirs = ">=3.9.1,<5"
[package.extras]
docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2,!=7.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"]
test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"]
[[package]]
name = "werkzeug"
version = "3.0.3"
description = "The comprehensive WSGI web application library."
optional = false
python-versions = ">=3.8"
files = [
{file = "werkzeug-3.0.3-py3-none-any.whl", hash = "sha256:fc9645dc43e03e4d630d23143a04a7f947a9a3b5727cd535fdfe155a17cc48c8"},
{file = "werkzeug-3.0.3.tar.gz", hash = "sha256:097e5bfda9f0aba8da6b8545146def481d06aa7d3266e7448e2cccf67dd8bd18"},
]
[package.dependencies]
MarkupSafe = ">=2.1.1"
[package.extras]
watchdog = ["watchdog (>=2.3)"]
[[package]]
name = "whitenoise"
version = "6.6.0"
@ -1087,4 +1609,4 @@ brotli = ["Brotli"]
[metadata]
lock-version = "2.0"
python-versions = "^3.12"
content-hash = "f28bf9b1d944ec8c8eaf82b086ebc4603cd3e844d8a2e4f80b8ab6c6ad2cab7a"
content-hash = "7e636d161791c07443e9ed457317703ac8cd0f52a2de870ff7aa869e877640eb"

View file

@ -7,27 +7,27 @@ readme = "README.md"
[tool.poetry.dependencies]
python = "^3.12"
django = {git = "https://github.com/django/django.git"}
django = "^5.0.6"
python-dotenv = "^1.0.1"
feedparser = "^6.0.11"
gunicorn = "^21.2.0"
dateparser = "^1.2.0"
discord-webhook = "^1.3.1"
django-ninja = "^1.1.0"
django-debug-toolbar = "^4.3.0"
whitenoise = { extras = ["brotli"], version = "^6.6.0" }
rich = "^13.7.1"
django-htmx = "^1.17.3"
django-redis = {extras = ["hiredis"], version = "^5.4.0"}
psycopg = {extras = ["binary"], version = "^3.1.19"} # TODO: Use psycopg[c] in Dockerfile
reader = "^3.12"
[tool.poetry.group.dev.dependencies]
ruff = "^0.3.0"
djlint = "^1.34.1"
pre-commit = "^3.7.1"
[build-system]
build-backend = "poetry.core.masonry.api"
requires = [
"poetry-core",
]
requires = ["poetry-core"]
[tool.ruff]
exclude = ["migrations"]
@ -37,19 +37,23 @@ preview = true
line-length = 120
lint.select = ["ALL"]
lint.ignore = [
"ARG001", # Checks for the presence of unused arguments in function definitions.
"ARG002", # Checks for the presence of unused arguments in instance method definitions.
"COM812", # Checks for the absence of trailing commas.
"CPY001", # Missing copyright notice at top of file
"ERA001", # Found commented-out code
"FIX002", # Line contains TODO
"D100", # Checks for undocumented public module definitions.
"D101", # Checks for undocumented public class definitions.
"D102", # Checks for undocumented public method definitions.
"D104", # Missing docstring in public package.
"D105", # Missing docstring in magic method.
"D106", # Checks for undocumented public class definitions, for nested classes.
"COM812", # Checks for the absence of trailing commas.
"D402", # Checks for function docstrings that include the function's signature in the summary line.
"DJ001", # Checks nullable string-based fields (like CharField and TextField) in Django models.
"ERA001", # Found commented-out code
"FIX002", # Line contains TODO
"ISC001", # Checks for implicitly concatenated strings on a single line.
"PLR6301", # Checks for the presence of unused self parameter in methods definitions.
"ARG001", # Checks for the presence of unused arguments in function definitions.
"TD003", # Checks that a TODO comment is associated with a link to a relevant issue or ticket.
]
[tool.ruff.lint.pydocstyle]

1
static/htmx.min.js vendored

File diff suppressed because one or more lines are too long

View file

@ -1,9 +0,0 @@
{% extends "base.html" %}
{% block content %}
<h2>Change Password</h2>
<form method="post">
{% csrf_token %}
{{ form.as_p }}
<button type="submit">Change Password</button>
</form>
{% endblock %}

View file

@ -1,12 +0,0 @@
{% extends "base.html" %}
{% block content %}
<p>
You can register <a href="{% url 'register' %}">here</a>.
</p>
<h2>Login</h2>
<form method="post">
{% csrf_token %}
{{ form.as_p }}
<button type="submit">Login</button>
</form>
{% endblock %}

View file

@ -1,50 +0,0 @@
{% extends "base.html" %}
{% block content %}
<h2>{{ user.username }}</h2>
<form method="post" action="{% url 'logout' %}">
{% csrf_token %}
<button type="submit">Logout</button>
</form>
<h3>Feeds</h3>
<ul>
{% for feed in user_feeds %}
<li>
<a href='{% url "feed" feed.id %}'>{{ feed.feed_url }}</a>
</li>
{% endfor %}
</ul>
<h3>Uploads</h3>
{% if user_uploads %}
<p>Uploaded files:</p>
{% else %}
<p>No uploaded files yet.</p>
{% endif %}
<ul id="uploads-list">
{% for upload in user_uploads %}
<li id="upload-{{ upload.id }}">
<a href="{{ upload.get_absolute_url }}">{{ upload }}</a>
<p>
Description: <span id="description-{{ upload.id }}">{{ upload.description|default:"No description" }}</span>
{% if upload.notes %}
<br>
Notes: {{ upload.notes }}
{% endif %}
</p>
<form method="post" action="{% url 'delete_upload' %}">
{% csrf_token %}
<input type="hidden" name="file_id" value="{{ upload.id }}">
<button type="submit">Delete</button>
</form>
<form method="post"
hx-post="{% url 'edit_description' %}"
hx-target="#description-{{ upload.id }}"
hx-swap="innerHTML">
{% csrf_token %}
<input type="hidden" name="file_id" value="{{ upload.id }}">
<input type="text" name="description" placeholder="New description" required>
<button type="submit">Edit description</button>
</form>
</li>
{% endfor %}
</ul>
{% endblock %}

View file

@ -1,9 +0,0 @@
{% extends "base.html" %}
{% block content %}
<h2>Register</h2>
<form method="post">
{% csrf_token %}
{{ form.as_p }}
<button type="submit">Register</button>
</form>
{% endblock %}

View file

@ -84,7 +84,7 @@
{% endif %}
<span class="title">
<h1>
<a href="{% url 'index' %}">FeedVault</a>
<a href="{% url 'feeds:index' %}">FeedVault</a>
</h1>
</span>
<div class="leftright">
@ -96,7 +96,7 @@
</small>
</div>
<div class="right">
<form action="{% url 'search' %}" method="get">
<form action="{% url 'feeds:search' %}" method="get">
<input type="text" name="q" placeholder="Search" />
<button type="submit">Search</button>
</form>
@ -106,22 +106,15 @@
<small>
<div class="leftright">
<div class="left">
<a href="{% url 'index' %}">Home</a> |
<a href="{% url 'domains' %}">Domains</a> |
<a href="{% url 'feeds' %}">Feeds</a> |
<a href="{% url 'api_v1:openapi-view' %}">API</a>
<a href="{% url 'feeds:index' %}">Home</a> |
<a href="{% url 'feeds:feeds' %}">Feeds</a> |
<a href="{% url 'feeds:upload' %}">Upload</a>
</div>
<div class="right">
<a href="https://github.com/TheLovinator1/FeedVault">GitHub</a> |
<a href="https://github.com/sponsors/TheLovinator1">Donate</a>
<!-- Show login if not logged in -->
{% if not user.is_authenticated %}
| <a href="{% url 'login' %}">Login</a>
{% endif %}
<!-- Show username if logged in -->
{% if user.is_authenticated %}
| <a href="{% url 'profile' %}">{{ user.username }}</a>
{% endif %}
{% if not user.is_authenticated %}| <a href="">Login</a>{% endif %}
{% if user.is_authenticated %}| <a href="">{{ user.username }}</a>{% endif %}
</div>
</div>
</small>
@ -135,14 +128,12 @@
<small>
<div class="leftright">
<div class="left">
Made by <a href="https://github.com/TheLovinator1">Joakim Hellsén</a>.
<a href="">Privacy Policy</a> | <a href="">Terms of Service</a>
</div>
<div class="right">No rights reserved.</div>
</div>
<div class="leftright">
<div class="left">
<a href="mailto:hello@feedvault.se">hello@feedvault.se</a>
</div>
<div class="left">TheLovinator#9276 on Discord</div>
<div class="right">A birthday present for Plipp ❤️</div>
</div>
</small>

View file

@ -1,15 +0,0 @@
{% extends "base.html" %}
{% block content %}
<h2>{{ domain.url }}</h2>
<p>This domain was added to the database on {{ domain.created_at|date }}.</p>
<p>Feeds for this domain:</p>
<ul>
{% for feed in feeds %}
<li>
<a href="{% url 'feed' feed.id %}">{{ feed.feed_url }}</a>
</li>
{% empty %}
<li>Found no feeds for this domain.</li>
{% endfor %}
</ul>
{% endblock %}

View file

@ -1,5 +0,0 @@
{% extends "base.html" %}
{% block content %}
<h2>Domains</h2>
{% include "partials/domains.html" %}
{% endblock %}

View file

@ -1,6 +1,6 @@
{% extends "base.html" %}
{% block content %}
<h2>{{ feed.feed_url }}</h2>
<h2>{{ feed.url }}</h2>
<p>{{ feed.description }}</p>
<h3>Entries</h3>
<ul>

View file

@ -1,5 +1,12 @@
{% extends "base.html" %}
{% block content %}
<h2>Latest Feeds</h2>
{% include "partials/feeds.html" %}
<ul>
{% for feed in feeds %}
<li>
<a href="{% url 'feeds:feed' feed.id %}">{{ feed.url }}</a>
<p>{{ feed.description }}</p>
</li>
{% endfor %}
</ul>
{% endblock %}

View file

@ -1,52 +1,14 @@
{% extends "base.html" %}
{% block content %}
{% if user.is_authenticated %}
<h2>Welcome, {{ user.username }}!</h2>
<p>
Input the URLs of the feeds you wish to archive below. You can add as many as needed, and access them through the website or API. Alternatively, include links to .opml files, and the feeds within will be archived.
</p>
<form action="{% url 'add' %}" method='post'>
{% csrf_token %}
<textarea id="urls" name="urls" rows="5" cols="50" required></textarea>
<button type="submit">Add feeds</button>
</form>
<br>
<h2>Upload</h2>
<p>
You can also upload files containing the feeds you wish to archive.
Currently supported file formats: .opml, .xml, .json.
Your file will be parsed in the future if not currently supported.
Feel free to upload databases, backups, or any other files containing feeds.
</p>
<form enctype="multipart/form-data"
method="post"
action="{% url 'upload' %}">
{% csrf_token %}
<p>
<label for="file">Choose a file to upload</label>
<br>
<br>
<input type="file" name="file" id="file" required>
<br>
<br>
<label for="description">Description (optional)</label>
<input type="text"
name="description"
id="description"
size="80"
placeholder="Description (optional)">
</p>
<button type="submit">Upload file</button>
</form>
{% else %}
<h2>Welcome to FeedVault!</h2>
<p>
FeedVault is a service that archives web feeds. It allows users to access and search for historical content from various websites. The service is designed to preserve the history of the web and provide a reliable source for accessing content that may no longer be available on the original websites.
</p>
<p>
You need to <a href="{% url 'login' %}">login</a> or <a href="{% url 'register' %}">register</a> to add new feeds or upload files.
</p>
{% endif %}
<h2>Welcome, archivist!</h2>
<p>
Input the URLs of the feeds you wish to archive below. You can add as many as needed, and access them through the website or API. Alternatively, include links to .opml files, and the feeds within will be archived.
</p>
<form action="{% url 'feeds:add' %}" method='post'>
{% csrf_token %}
<textarea id="urls" name="urls" rows="5" cols="50" required></textarea>
<button type="submit">Add feeds</button>
</form>
<h2>FAQ</h2>
<details>
<summary>What are web feeds?</summary>

View file

@ -1,12 +0,0 @@
{% if domains %}
{% for domain in domains %}
<a href="{% url 'domain' domain.id %}">{{ domain.url }}</a> - {{ domain.created_at|date }}
<br>
{% endfor %}
{% else %}
<p>No domains yet. Time to add some!</p>
{% endif %}
<div hx-get="{% url 'domains' %}?page={{ page|add:1 }}"
hx-trigger="revealed"
hx-target="this"
hx-swap="outerHTML">Loading...</div>

View file

@ -1,12 +0,0 @@
{% if feeds %}
{% for feed in feeds %}
<a href="{% url 'feed' feed.id %}">{{ feed.feed_url|default:"Unknown Feed" }} →</a>
<br>
{% endfor %}
{% else %}
<p>No feeds yet. Time to add some!</p>
{% endif %}
<div hx-get="{% url 'feeds' %}?page={{ page|add:1 }}"
hx-trigger="revealed"
hx-target="this"
hx-swap="outerHTML">Loading...</div>

View file

@ -6,7 +6,7 @@
</h2>
{% if feeds %}
{% for feed in feeds %}
<a href="{% url 'feed' feed.id %}">{{ feed.feed_url|default:"Unknown Feed" }} →</a>
<a href="{% url 'feeds:feed' feed.id %}">{{ feed.url|default:"Unknown Feed" }} →</a>
<br>
{% endfor %}
{% else %}

27
templates/upload.html Normal file
View file

@ -0,0 +1,27 @@
<h2>Upload</h2>
<p>
You can also upload files containing the feeds you wish to archive.
Currently supported file formats: .opml, .xml, .json.
  If your file's format is not currently supported, it will be parsed once support is added.
Feel free to upload databases, backups, or any other files containing feeds.
</p>
<form enctype="multipart/form-data"
method="post"
action="{% url 'feeds:upload' %}">
{% csrf_token %}
<p>
<label for="file">Choose a file to upload</label>
<br>
<br>
<input type="file" name="file" id="file" required>
<br>
<br>
<label for="description">Description (optional)</label>
<input type="text"
name="description"
id="description"
size="80"
placeholder="Description (optional)">
</p>
<button type="submit">Upload file</button>
</form>