Move back to Python 🙃

This commit is contained in:
Joakim Hellsén 2024-02-18 22:00:28 +01:00
commit d7be14f5a2
60 changed files with 757 additions and 5605 deletions

View file

@ -1,56 +0,0 @@
root = "."
testdata_dir = "testdata"
tmp_dir = "tmp"
[build]
args_bin = []
bin = "tmp\\main.exe"
cmd = "go build -o ./tmp/main.exe ."
delay = 1000
exclude_dir = [
"assets",
"static",
"tmp",
"vendor",
"testdata",
"tests",
".git",
".vscode",
"data",
]
exclude_file = []
exclude_regex = ["_test.go"]
exclude_unchanged = false
follow_symlink = false
full_bin = ""
include_dir = []
include_ext = ["go", "tpl", "tmpl", "html"]
include_file = []
kill_delay = "0s"
log = "build-errors.log"
poll = false
poll_interval = 0
post_cmd = []
pre_cmd = []
rerun = false
rerun_delay = 500
send_interrupt = false
stop_on_error = false
[color]
app = ""
build = "yellow"
main = "magenta"
runner = "green"
watcher = "cyan"
[log]
main_only = false
time = false
[misc]
clean_on_exit = false
[screen]
clear_on_rebuild = false
keep_scroll = true

View file

@ -1,5 +1,4 @@
PORT=8000
HOST=127.0.0.1
DATABASE_URL=postgresql://localhost/feedvault?user=feedvault&password=feedvault
DISCORD_WEBHOOK_URL=
POSTGRES_USER=
POSTGRES_PASSWORD=
POSTGRES_DB=feedvault
SECRET_KEY=

View file

@ -1,68 +0,0 @@
name: Go
on:
push:
pull_request:
workflow_dispatch:
schedule:
- cron: "0 0 * * *"
jobs:
build:
runs-on: ubuntu-latest
env:
GOOSE_DRIVER: postgres
GOOSE_DBSTRING: "user=feedvault password=feedvault dbname=feedvault sslmode=disable host=localhost port=5432"
services:
postgres:
image: postgres:16
env:
POSTGRES_USER: feedvault
POSTGRES_PASSWORD: feedvault
POSTGRES_DB: feedvault
ports:
- 5432:5432
options: --health-cmd pg_isready --health-interval 10s --health-timeout 5s --health-retries 5
steps:
- uses: actions/checkout@v4
- name: Set up Go
uses: actions/setup-go@v5
with:
check-latest: true
go-version: stable
- name: Cache Go modules
uses: actions/cache@v4
with:
path: ~/go/pkg/mod
key: ${{ runner.os }}-go-${{ hashFiles('**/go.sum') }}
restore-keys: |
${{ runner.os }}-go-
- name: Install Goose
run: go install github.com/pressly/goose/v3/cmd/goose@latest
- name: Goose Status
run: goose -dir sql/schema status
- name: Goose Up (Migrate Database)
run: goose -dir sql/schema up
- name: Test
run: go test ./... -v
- name: Lint
run: go vet ./...
- name: Fmt
run: go fmt ./...
- name: Tidy
run: go mod tidy
- name: Build
run: go build -v -ldflags="-s -w" .

9
.vscode/launch.json vendored
View file

@ -5,11 +5,12 @@
"version": "0.2.0",
"configurations": [
{
"name": "Launch Package",
"type": "go",
"name": "Python Debugger: Django",
"type": "debugpy",
"request": "launch",
"mode": "auto",
"program": "${workspaceFolder}"
"program": "${workspaceFolder}\\manage.py",
"args": ["runserver"],
"django": true
}
]
}

22
.vscode/settings.json vendored
View file

@ -20,6 +20,7 @@
"DBSTRING",
"easybox",
"Eo's",
"errorreportsto",
"etxr",
"feedburner",
"feedi",
@ -46,6 +47,7 @@
"lmao",
"localbattle",
"localdomain",
"lscr",
"malformedurl",
"meowning",
"mmcdole",
@ -56,6 +58,7 @@
"Omnis",
"pacman",
"PGHOST",
"PGID",
"PGPORT",
"pgtype",
"PGUSER",
@ -63,6 +66,7 @@
"Prés",
"pressly",
"psql",
"PUID",
"Rawr",
"regexes",
"Retour",
@ -73,6 +77,7 @@
"speedport",
"sqlc",
"sslmode",
"staticfiles",
"steamloopback",
"stretchr",
"stylesheet",
@ -91,20 +96,5 @@
"webmail",
"XOXO",
"zerolog"
],
"terminal.integrated.env.windows": {
"GOOSE_DRIVER": "postgres",
"GOOSE_DBSTRING": "user=feedvault password=feedvault dbname=feedvault sslmode=disable",
"GOOSE_MIGRATION_DIR": "${workspaceFolder}/sql/schema"
},
"terminal.integrated.env.linux": {
"GOOSE_DRIVER": "postgres",
"GOOSE_DBSTRING": "user=feedvault password=feedvault dbname=feedvault sslmode=disable",
"GOOSE_MIGRATION_DIR": "${workspaceFolder}/sql/schema"
},
"terminal.integrated.env.osx": {
"GOOSE_DRIVER": "postgres",
"GOOSE_DBSTRING": "user=feedvault password=feedvault dbname=feedvault sslmode=disable",
"GOOSE_MIGRATION_DIR": "${workspaceFolder}/sql/schema"
}
]
}

View file

@ -1,56 +0,0 @@
package main
import (
"encoding/json"
"fmt"
"log"
"time"
"github.com/TheLovinator1/FeedVault/db"
"github.com/jackc/pgx/v5/pgtype"
"github.com/mmcdole/gofeed"
)
// makeCreateFeedParams converts a parsed gofeed.Feed into the db.CreateFeedParams
// needed to insert a new row into the feeds table.
//
// Optional text columns map to pgtype.Text with Valid=false when the source
// string is empty; optional timestamps map to pgtype.Timestamptz with
// Valid=false when the parsed time is absent (nil) or zero.
func makeCreateFeedParams(feedURL string, feed *gofeed.Feed) db.CreateFeedParams {
	var updatedTime time.Time
	if feed.UpdatedParsed != nil {
		updatedTime = *feed.UpdatedParsed
	}
	var publishedTime time.Time
	if feed.PublishedParsed != nil {
		publishedTime = *feed.PublishedParsed
	}
	// Marshal the feed's custom data, defaulting to an empty JSON object.
	// The nil guard mirrors makeCreateItemParams: json.Marshal of a nil map
	// would otherwise produce "null" instead of "{}".
	feedCustom := []byte("{}")
	if feed.Custom != nil {
		var err error
		feedCustom, err = json.Marshal(feed.Custom)
		if err != nil {
			fmt.Println("Error marshalling feed custom data:", err)
			feedCustom = []byte("{}")
		}
	}
	params := db.CreateFeedParams{
		Url:             feedURL,
		CreatedAt:       pgtype.Timestamptz{Time: time.Now(), Valid: true},
		UpdatedAt:       pgtype.Timestamptz{Time: time.Now(), Valid: true},
		DeletedAt:       pgtype.Timestamptz{Valid: false},
		Title:           pgtype.Text{String: feed.Title, Valid: feed.Title != ""},
		Description:     pgtype.Text{String: feed.Description, Valid: feed.Description != ""},
		Link:            pgtype.Text{String: feed.Link, Valid: feed.Link != ""},
		FeedLink:        pgtype.Text{String: feed.FeedLink, Valid: feed.FeedLink != ""},
		Links:           feed.Links,
		Updated:         pgtype.Text{String: feed.Updated, Valid: feed.Updated != ""},
		UpdatedParsed:   pgtype.Timestamptz{Time: updatedTime, Valid: !updatedTime.IsZero()},
		Published:       pgtype.Text{String: feed.Published, Valid: feed.Published != ""},
		PublishedParsed: pgtype.Timestamptz{Time: publishedTime, Valid: !publishedTime.IsZero()},
		Language:        pgtype.Text{String: feed.Language, Valid: feed.Language != ""},
		Copyright:       pgtype.Text{String: feed.Copyright, Valid: feed.Copyright != ""},
		Generator:       pgtype.Text{String: feed.Generator, Valid: feed.Generator != ""},
		Categories:      feed.Categories,
		Custom:          feedCustom,
		FeedType:        pgtype.Text{String: feed.FeedType, Valid: feed.FeedType != ""},
		FeedVersion:     pgtype.Text{String: feed.FeedVersion, Valid: feed.FeedVersion != ""},
	}
	log.Printf("Created feed params: %+v", params)
	return params
}

View file

@ -1,86 +0,0 @@
package main
import (
"context"
"encoding/json"
"fmt"
"log"
"time"
"github.com/TheLovinator1/FeedVault/db"
"github.com/jackc/pgx/v5/pgtype"
"github.com/mmcdole/gofeed"
)
// addItemToDB inserts a single feed item and all of its related data
// (extensions, authors, images, Dublin Core and iTunes metadata) into the
// database. newFeed is the already-persisted parent feed the item belongs to.
//
// NOTE(review): by Go convention ctx should be the first parameter; left
// as-is so existing call sites keep working.
func addItemToDB(item *gofeed.Item, ctx context.Context, newFeed db.Feed) {
	newItem, err := DB.CreateItem(ctx, makeCreateItemParams(item, newFeed.ID))
	if err != nil {
		// Without a persisted item there is no valid newItem.ID, so every
		// child insert below would reference a zero ID — bail out early.
		log.Printf("Error adding item to database: %s", err)
		return
	}

	// Add extensions to the database
	addItemExtensionToDB(ctx, item, newItem)

	// Add authors to the database
	addItemAuthors(ctx, item, newItem)

	// Add images to the database (only present on some feeds)
	if item.Image != nil {
		addItemImages(ctx, item, newItem)
	}

	// Add Dublin Core to the database
	createItemDublinCore(ctx, item, newItem)

	// Add iTunes extensions to the database
	_, err = createItemItunes(ctx, item, newItem)
	if err != nil {
		log.Printf("Error adding iTunes extensions to database: %s", err)
	}

	log.Printf("Item added to database")
}
// makeCreateItemParams maps a parsed gofeed.Item onto the db.CreateItemParams
// used to insert a row into the items table, attached to the feed identified
// by feedID. Empty strings become invalid pgtype.Text values and missing
// parsed timestamps become invalid pgtype.Timestamptz values.
func makeCreateItemParams(item *gofeed.Item, feedID int64) db.CreateItemParams {
	// Dereference the optional parsed timestamps; the zero time means
	// "not present" and is marked invalid below.
	deref := func(t *time.Time) time.Time {
		if t != nil {
			return *t
		}
		return time.Time{}
	}
	updatedAt := deref(item.UpdatedParsed)
	publishedAt := deref(item.PublishedParsed)

	// Custom data is stored as JSON; fall back to an empty object when there
	// is nothing to store or marshalling fails.
	customJSON := []byte("{}")
	if item.Custom != nil {
		if data, err := json.Marshal(item.Custom); err == nil {
			customJSON = data
		} else {
			fmt.Println("Error marshalling item custom data:", err)
		}
	}

	params := db.CreateItemParams{
		CreatedAt:       pgtype.Timestamptz{Time: time.Now(), Valid: true},
		UpdatedAt:       pgtype.Timestamptz{Time: time.Now(), Valid: true},
		DeletedAt:       pgtype.Timestamptz{Valid: false},
		Title:           pgtype.Text{String: item.Title, Valid: item.Title != ""},
		Description:     pgtype.Text{String: item.Description, Valid: item.Description != ""},
		Content:         pgtype.Text{String: item.Content, Valid: item.Content != ""},
		Link:            pgtype.Text{String: item.Link, Valid: item.Link != ""},
		Links:           item.Links,
		Updated:         pgtype.Text{String: item.Updated, Valid: item.Updated != ""},
		UpdatedParsed:   pgtype.Timestamptz{Time: updatedAt, Valid: !updatedAt.IsZero()},
		Published:       pgtype.Text{String: item.Published, Valid: item.Published != ""},
		PublishedParsed: pgtype.Timestamptz{Time: publishedAt, Valid: !publishedAt.IsZero()},
		Guid:            pgtype.Text{String: item.GUID, Valid: item.GUID != ""},
		Categories:      item.Categories,
		Custom:          customJSON,
		FeedID:          feedID,
	}
	log.Printf("Created item params: %+v", params)
	return params
}

View file

@ -1,53 +0,0 @@
package main
import (
"context"
"log"
"time"
"github.com/TheLovinator1/FeedVault/db"
"github.com/jackc/pgx/v5/pgtype"
"github.com/mmcdole/gofeed"
)
// addFeedAuthors persists every author of the parsed feed as a feed_authors
// row referencing the already-inserted newFeed. Failures are logged per
// author and do not stop the remaining authors from being saved.
func addFeedAuthors(ctx context.Context, feed *gofeed.Feed, newFeed db.Feed) {
	if feed.Authors == nil {
		log.Printf("No authors to add to database")
		return
	}

	// Add authors to the database
	for _, author := range feed.Authors {
		_, err := DB.CreateFeedAuthor(ctx, db.CreateFeedAuthorParams{
			CreatedAt: pgtype.Timestamptz{Time: time.Now(), Valid: true},
			UpdatedAt: pgtype.Timestamptz{Time: time.Now(), Valid: true},
			DeletedAt: pgtype.Timestamptz{Valid: false},
			Name:      pgtype.Text{String: author.Name, Valid: author.Name != ""},
			// BUG FIX: Email was previously never set, so it was always
			// stored as NULL even though it was being logged below.
			Email:  pgtype.Text{String: author.Email, Valid: author.Email != ""},
			FeedID: newFeed.ID,
		})
		if err != nil {
			log.Printf("Error adding author %s (%s) to database: %s", author.Name, author.Email, err)
			continue
		}
		log.Printf("Author %s (%s) added to database", author.Name, author.Email)
	}
}
// addItemAuthors persists every author attached to item as an item_authors
// row referencing the already-inserted newItem. Each failure is logged and
// skipped so the remaining authors are still saved.
func addItemAuthors(ctx context.Context, item *gofeed.Item, newItem db.Item) {
	for _, a := range item.Authors {
		params := db.CreateItemAuthorParams{
			CreatedAt: pgtype.Timestamptz{Time: time.Now(), Valid: true},
			UpdatedAt: pgtype.Timestamptz{Time: time.Now(), Valid: true},
			DeletedAt: pgtype.Timestamptz{Valid: false},
			Name:      pgtype.Text{String: a.Name, Valid: a.Name != ""},
			Email:     pgtype.Text{String: a.Email, Valid: a.Email != ""},
			ItemID:    newItem.ID,
		}
		if _, err := DB.CreateItemAuthor(ctx, params); err != nil {
			log.Printf("Error adding author %s (%s) to database: %s", a.Name, a.Email, err)
			continue
		}
		log.Printf("Author %s (%s) added to database", a.Name, a.Email)
	}
}

View file

@ -1,32 +0,0 @@
// Code generated by sqlc. DO NOT EDIT.
// versions:
// sqlc v1.25.0
package db
import (
"context"
"github.com/jackc/pgx/v5"
"github.com/jackc/pgx/v5/pgconn"
)
// DBTX is the minimal query interface the generated code runs against.
// pgx.Tx satisfies it (see WithTx below); presumably pgx connections and
// pools do as well — confirm against the sqlc/pgx docs.
type DBTX interface {
	Exec(context.Context, string, ...interface{}) (pgconn.CommandTag, error)
	Query(context.Context, string, ...interface{}) (pgx.Rows, error)
	QueryRow(context.Context, string, ...interface{}) pgx.Row
}

// New wraps db in a Queries value that executes all generated SQL through it.
func New(db DBTX) *Queries {
	return &Queries{db: db}
}

// Queries holds the executor used by every generated query method.
type Queries struct {
	db DBTX
}

// WithTx returns a copy of Queries bound to the transaction tx, so the
// generated queries run inside that transaction.
func (q *Queries) WithTx(tx pgx.Tx) *Queries {
	return &Queries{
		db: tx,
	}
}

View file

@ -1,1887 +0,0 @@
// Code generated by sqlc. DO NOT EDIT.
// versions:
// sqlc v1.25.0
// source: feeds.sql
package db
import (
"context"
"github.com/jackc/pgx/v5/pgtype"
)
// sqlc-generated (do not edit): SQL for Queries.CountFeeds.
const countFeeds = `-- name: CountFeeds :one
SELECT
COUNT(*)
FROM
feeds
`

// CountFeeds returns the total number of rows in the feeds table.
func (q *Queries) CountFeeds(ctx context.Context) (int64, error) {
	row := q.db.QueryRow(ctx, countFeeds)
	var count int64
	err := row.Scan(&count)
	return count, err
}
// sqlc-generated (do not edit): SQL for Queries.CountItems.
const countItems = `-- name: CountItems :one
SELECT
COUNT(*)
FROM
items
`

// CountItems returns the total number of rows in the items table.
func (q *Queries) CountItems(ctx context.Context) (int64, error) {
	row := q.db.QueryRow(ctx, countItems)
	var count int64
	err := row.Scan(&count)
	return count, err
}
const createFeed = `-- name: CreateFeed :one
INSERT INTO
feeds (
"url",
created_at,
updated_at,
deleted_at,
title,
"description",
link,
feed_link,
links,
updated,
updated_parsed,
published,
published_parsed,
"language",
copyright,
generator,
categories,
custom,
feed_type,
feed_version
)
VALUES
(
$1,
$2,
$3,
$4,
$5,
$6,
$7,
$8,
$9,
$10,
$11,
$12,
$13,
$14,
$15,
$16,
$17,
$18,
$19,
$20
)
RETURNING
id, url, created_at, updated_at, deleted_at, title, description, link, feed_link, links, updated, updated_parsed, published, published_parsed, language, copyright, generator, categories, custom, feed_type, feed_version
`
type CreateFeedParams struct {
Url string `json:"url"`
CreatedAt pgtype.Timestamptz `json:"created_at"`
UpdatedAt pgtype.Timestamptz `json:"updated_at"`
DeletedAt pgtype.Timestamptz `json:"deleted_at"`
Title pgtype.Text `json:"title"`
Description pgtype.Text `json:"description"`
Link pgtype.Text `json:"link"`
FeedLink pgtype.Text `json:"feed_link"`
Links []string `json:"links"`
Updated pgtype.Text `json:"updated"`
UpdatedParsed pgtype.Timestamptz `json:"updated_parsed"`
Published pgtype.Text `json:"published"`
PublishedParsed pgtype.Timestamptz `json:"published_parsed"`
Language pgtype.Text `json:"language"`
Copyright pgtype.Text `json:"copyright"`
Generator pgtype.Text `json:"generator"`
Categories []string `json:"categories"`
Custom []byte `json:"custom"`
FeedType pgtype.Text `json:"feed_type"`
FeedVersion pgtype.Text `json:"feed_version"`
}
func (q *Queries) CreateFeed(ctx context.Context, arg CreateFeedParams) (Feed, error) {
row := q.db.QueryRow(ctx, createFeed,
arg.Url,
arg.CreatedAt,
arg.UpdatedAt,
arg.DeletedAt,
arg.Title,
arg.Description,
arg.Link,
arg.FeedLink,
arg.Links,
arg.Updated,
arg.UpdatedParsed,
arg.Published,
arg.PublishedParsed,
arg.Language,
arg.Copyright,
arg.Generator,
arg.Categories,
arg.Custom,
arg.FeedType,
arg.FeedVersion,
)
var i Feed
err := row.Scan(
&i.ID,
&i.Url,
&i.CreatedAt,
&i.UpdatedAt,
&i.DeletedAt,
&i.Title,
&i.Description,
&i.Link,
&i.FeedLink,
&i.Links,
&i.Updated,
&i.UpdatedParsed,
&i.Published,
&i.PublishedParsed,
&i.Language,
&i.Copyright,
&i.Generator,
&i.Categories,
&i.Custom,
&i.FeedType,
&i.FeedVersion,
)
return i, err
}
const createFeedAuthor = `-- name: CreateFeedAuthor :one
INSERT INTO
feed_authors (
created_at,
updated_at,
deleted_at,
"name",
email,
feed_id
)
VALUES
($1, $2, $3, $4, $5, $6)
RETURNING
id, created_at, updated_at, deleted_at, name, email, feed_id
`
type CreateFeedAuthorParams struct {
CreatedAt pgtype.Timestamptz `json:"created_at"`
UpdatedAt pgtype.Timestamptz `json:"updated_at"`
DeletedAt pgtype.Timestamptz `json:"deleted_at"`
Name pgtype.Text `json:"name"`
Email pgtype.Text `json:"email"`
FeedID int64 `json:"feed_id"`
}
func (q *Queries) CreateFeedAuthor(ctx context.Context, arg CreateFeedAuthorParams) (FeedAuthor, error) {
row := q.db.QueryRow(ctx, createFeedAuthor,
arg.CreatedAt,
arg.UpdatedAt,
arg.DeletedAt,
arg.Name,
arg.Email,
arg.FeedID,
)
var i FeedAuthor
err := row.Scan(
&i.ID,
&i.CreatedAt,
&i.UpdatedAt,
&i.DeletedAt,
&i.Name,
&i.Email,
&i.FeedID,
)
return i, err
}
const createFeedDublinCore = `-- name: CreateFeedDublinCore :one
INSERT INTO
feed_dublin_cores (
created_at,
updated_at,
deleted_at,
title,
creator,
author,
"subject",
"description",
publisher,
contributor,
"date",
"type",
format,
identifier,
source,
"language",
relation,
coverage,
rights,
feed_id
)
VALUES
(
$1,
$2,
$3,
$4,
$5,
$6,
$7,
$8,
$9,
$10,
$11,
$12,
$13,
$14,
$15,
$16,
$17,
$18,
$19,
$20
)
RETURNING
id, created_at, updated_at, deleted_at, title, creator, author, subject, description, publisher, contributor, date, type, format, identifier, source, language, relation, coverage, rights, feed_id
`
type CreateFeedDublinCoreParams struct {
CreatedAt pgtype.Timestamptz `json:"created_at"`
UpdatedAt pgtype.Timestamptz `json:"updated_at"`
DeletedAt pgtype.Timestamptz `json:"deleted_at"`
Title []string `json:"title"`
Creator []string `json:"creator"`
Author []string `json:"author"`
Subject []string `json:"subject"`
Description []string `json:"description"`
Publisher []string `json:"publisher"`
Contributor []string `json:"contributor"`
Date []string `json:"date"`
Type []string `json:"type"`
Format []string `json:"format"`
Identifier []string `json:"identifier"`
Source []string `json:"source"`
Language []string `json:"language"`
Relation []string `json:"relation"`
Coverage []string `json:"coverage"`
Rights []string `json:"rights"`
FeedID int64 `json:"feed_id"`
}
func (q *Queries) CreateFeedDublinCore(ctx context.Context, arg CreateFeedDublinCoreParams) (FeedDublinCore, error) {
row := q.db.QueryRow(ctx, createFeedDublinCore,
arg.CreatedAt,
arg.UpdatedAt,
arg.DeletedAt,
arg.Title,
arg.Creator,
arg.Author,
arg.Subject,
arg.Description,
arg.Publisher,
arg.Contributor,
arg.Date,
arg.Type,
arg.Format,
arg.Identifier,
arg.Source,
arg.Language,
arg.Relation,
arg.Coverage,
arg.Rights,
arg.FeedID,
)
var i FeedDublinCore
err := row.Scan(
&i.ID,
&i.CreatedAt,
&i.UpdatedAt,
&i.DeletedAt,
&i.Title,
&i.Creator,
&i.Author,
&i.Subject,
&i.Description,
&i.Publisher,
&i.Contributor,
&i.Date,
&i.Type,
&i.Format,
&i.Identifier,
&i.Source,
&i.Language,
&i.Relation,
&i.Coverage,
&i.Rights,
&i.FeedID,
)
return i, err
}
const createFeedExtension = `-- name: CreateFeedExtension :one
INSERT INTO
feed_extensions (
created_at,
updated_at,
deleted_at,
"name",
"value",
attrs,
children,
feed_id
)
VALUES
($1, $2, $3, $4, $5, $6, $7, $8)
RETURNING
id, created_at, updated_at, deleted_at, name, value, attrs, children, feed_id
`
type CreateFeedExtensionParams struct {
CreatedAt pgtype.Timestamptz `json:"created_at"`
UpdatedAt pgtype.Timestamptz `json:"updated_at"`
DeletedAt pgtype.Timestamptz `json:"deleted_at"`
Name pgtype.Text `json:"name"`
Value pgtype.Text `json:"value"`
Attrs []byte `json:"attrs"`
Children []byte `json:"children"`
FeedID int64 `json:"feed_id"`
}
func (q *Queries) CreateFeedExtension(ctx context.Context, arg CreateFeedExtensionParams) (FeedExtension, error) {
row := q.db.QueryRow(ctx, createFeedExtension,
arg.CreatedAt,
arg.UpdatedAt,
arg.DeletedAt,
arg.Name,
arg.Value,
arg.Attrs,
arg.Children,
arg.FeedID,
)
var i FeedExtension
err := row.Scan(
&i.ID,
&i.CreatedAt,
&i.UpdatedAt,
&i.DeletedAt,
&i.Name,
&i.Value,
&i.Attrs,
&i.Children,
&i.FeedID,
)
return i, err
}
const createFeedImage = `-- name: CreateFeedImage :one
INSERT INTO
feed_images (
created_at,
updated_at,
deleted_at,
"url",
title,
feed_id
)
VALUES
($1, $2, $3, $4, $5, $6)
RETURNING
id, created_at, updated_at, deleted_at, url, title, feed_id
`
type CreateFeedImageParams struct {
CreatedAt pgtype.Timestamptz `json:"created_at"`
UpdatedAt pgtype.Timestamptz `json:"updated_at"`
DeletedAt pgtype.Timestamptz `json:"deleted_at"`
Url pgtype.Text `json:"url"`
Title pgtype.Text `json:"title"`
FeedID int64 `json:"feed_id"`
}
func (q *Queries) CreateFeedImage(ctx context.Context, arg CreateFeedImageParams) (FeedImage, error) {
row := q.db.QueryRow(ctx, createFeedImage,
arg.CreatedAt,
arg.UpdatedAt,
arg.DeletedAt,
arg.Url,
arg.Title,
arg.FeedID,
)
var i FeedImage
err := row.Scan(
&i.ID,
&i.CreatedAt,
&i.UpdatedAt,
&i.DeletedAt,
&i.Url,
&i.Title,
&i.FeedID,
)
return i, err
}
const createFeedItunes = `-- name: CreateFeedItunes :one
INSERT INTO
feed_itunes (
created_at,
updated_at,
deleted_at,
author,
"block",
"explicit",
keywords,
subtitle,
summary,
"image",
complete,
new_feed_url,
"type",
feed_id
)
VALUES
(
$1,
$2,
$3,
$4,
$5,
$6,
$7,
$8,
$9,
$10,
$11,
$12,
$13,
$14
)
RETURNING
id, created_at, updated_at, deleted_at, author, block, explicit, keywords, subtitle, summary, image, complete, new_feed_url, type, feed_id
`
type CreateFeedItunesParams struct {
CreatedAt pgtype.Timestamptz `json:"created_at"`
UpdatedAt pgtype.Timestamptz `json:"updated_at"`
DeletedAt pgtype.Timestamptz `json:"deleted_at"`
Author pgtype.Text `json:"author"`
Block pgtype.Text `json:"block"`
Explicit pgtype.Text `json:"explicit"`
Keywords pgtype.Text `json:"keywords"`
Subtitle pgtype.Text `json:"subtitle"`
Summary pgtype.Text `json:"summary"`
Image pgtype.Text `json:"image"`
Complete pgtype.Text `json:"complete"`
NewFeedUrl pgtype.Text `json:"new_feed_url"`
Type pgtype.Text `json:"type"`
FeedID int64 `json:"feed_id"`
}
func (q *Queries) CreateFeedItunes(ctx context.Context, arg CreateFeedItunesParams) (FeedItune, error) {
row := q.db.QueryRow(ctx, createFeedItunes,
arg.CreatedAt,
arg.UpdatedAt,
arg.DeletedAt,
arg.Author,
arg.Block,
arg.Explicit,
arg.Keywords,
arg.Subtitle,
arg.Summary,
arg.Image,
arg.Complete,
arg.NewFeedUrl,
arg.Type,
arg.FeedID,
)
var i FeedItune
err := row.Scan(
&i.ID,
&i.CreatedAt,
&i.UpdatedAt,
&i.DeletedAt,
&i.Author,
&i.Block,
&i.Explicit,
&i.Keywords,
&i.Subtitle,
&i.Summary,
&i.Image,
&i.Complete,
&i.NewFeedUrl,
&i.Type,
&i.FeedID,
)
return i, err
}
const createFeedItunesCategory = `-- name: CreateFeedItunesCategory :one
INSERT INTO
feed_itunes_categories (
created_at,
updated_at,
deleted_at,
"text",
subcategory,
itunes_id
)
VALUES
($1, $2, $3, $4, $5, $6)
RETURNING
id, created_at, updated_at, deleted_at, text, subcategory, itunes_id
`
type CreateFeedItunesCategoryParams struct {
CreatedAt pgtype.Timestamptz `json:"created_at"`
UpdatedAt pgtype.Timestamptz `json:"updated_at"`
DeletedAt pgtype.Timestamptz `json:"deleted_at"`
Text pgtype.Text `json:"text"`
Subcategory pgtype.Int8 `json:"subcategory"`
ItunesID int64 `json:"itunes_id"`
}
func (q *Queries) CreateFeedItunesCategory(ctx context.Context, arg CreateFeedItunesCategoryParams) (FeedItunesCategory, error) {
row := q.db.QueryRow(ctx, createFeedItunesCategory,
arg.CreatedAt,
arg.UpdatedAt,
arg.DeletedAt,
arg.Text,
arg.Subcategory,
arg.ItunesID,
)
var i FeedItunesCategory
err := row.Scan(
&i.ID,
&i.CreatedAt,
&i.UpdatedAt,
&i.DeletedAt,
&i.Text,
&i.Subcategory,
&i.ItunesID,
)
return i, err
}
const createFeedItunesOwner = `-- name: CreateFeedItunesOwner :one
INSERT INTO
feed_itunes_owners (
created_at,
updated_at,
deleted_at,
email,
"name",
itunes_id
)
VALUES
($1, $2, $3, $4, $5, $6)
RETURNING
id, created_at, updated_at, deleted_at, email, name, itunes_id
`
type CreateFeedItunesOwnerParams struct {
CreatedAt pgtype.Timestamptz `json:"created_at"`
UpdatedAt pgtype.Timestamptz `json:"updated_at"`
DeletedAt pgtype.Timestamptz `json:"deleted_at"`
Email pgtype.Text `json:"email"`
Name pgtype.Text `json:"name"`
ItunesID int64 `json:"itunes_id"`
}
func (q *Queries) CreateFeedItunesOwner(ctx context.Context, arg CreateFeedItunesOwnerParams) (FeedItunesOwner, error) {
row := q.db.QueryRow(ctx, createFeedItunesOwner,
arg.CreatedAt,
arg.UpdatedAt,
arg.DeletedAt,
arg.Email,
arg.Name,
arg.ItunesID,
)
var i FeedItunesOwner
err := row.Scan(
&i.ID,
&i.CreatedAt,
&i.UpdatedAt,
&i.DeletedAt,
&i.Email,
&i.Name,
&i.ItunesID,
)
return i, err
}
const createItem = `-- name: CreateItem :one
INSERT INTO
items (
created_at,
updated_at,
deleted_at,
title,
"description",
content,
link,
links,
updated,
updated_parsed,
published,
published_parsed,
"guid",
categories,
custom,
feed_id
)
VALUES
(
$1,
$2,
$3,
$4,
$5,
$6,
$7,
$8,
$9,
$10,
$11,
$12,
$13,
$14,
$15,
$16
)
RETURNING
id, created_at, updated_at, deleted_at, title, description, content, link, links, updated, updated_parsed, published, published_parsed, guid, categories, custom, feed_id
`
type CreateItemParams struct {
CreatedAt pgtype.Timestamptz `json:"created_at"`
UpdatedAt pgtype.Timestamptz `json:"updated_at"`
DeletedAt pgtype.Timestamptz `json:"deleted_at"`
Title pgtype.Text `json:"title"`
Description pgtype.Text `json:"description"`
Content pgtype.Text `json:"content"`
Link pgtype.Text `json:"link"`
Links []string `json:"links"`
Updated pgtype.Text `json:"updated"`
UpdatedParsed pgtype.Timestamptz `json:"updated_parsed"`
Published pgtype.Text `json:"published"`
PublishedParsed pgtype.Timestamptz `json:"published_parsed"`
Guid pgtype.Text `json:"guid"`
Categories []string `json:"categories"`
Custom []byte `json:"custom"`
FeedID int64 `json:"feed_id"`
}
func (q *Queries) CreateItem(ctx context.Context, arg CreateItemParams) (Item, error) {
row := q.db.QueryRow(ctx, createItem,
arg.CreatedAt,
arg.UpdatedAt,
arg.DeletedAt,
arg.Title,
arg.Description,
arg.Content,
arg.Link,
arg.Links,
arg.Updated,
arg.UpdatedParsed,
arg.Published,
arg.PublishedParsed,
arg.Guid,
arg.Categories,
arg.Custom,
arg.FeedID,
)
var i Item
err := row.Scan(
&i.ID,
&i.CreatedAt,
&i.UpdatedAt,
&i.DeletedAt,
&i.Title,
&i.Description,
&i.Content,
&i.Link,
&i.Links,
&i.Updated,
&i.UpdatedParsed,
&i.Published,
&i.PublishedParsed,
&i.Guid,
&i.Categories,
&i.Custom,
&i.FeedID,
)
return i, err
}
const createItemAuthor = `-- name: CreateItemAuthor :one
INSERT INTO
item_authors (
created_at,
updated_at,
deleted_at,
"name",
email,
item_id
)
VALUES
($1, $2, $3, $4, $5, $6)
RETURNING
id, created_at, updated_at, deleted_at, name, email, item_id
`
type CreateItemAuthorParams struct {
CreatedAt pgtype.Timestamptz `json:"created_at"`
UpdatedAt pgtype.Timestamptz `json:"updated_at"`
DeletedAt pgtype.Timestamptz `json:"deleted_at"`
Name pgtype.Text `json:"name"`
Email pgtype.Text `json:"email"`
ItemID int64 `json:"item_id"`
}
func (q *Queries) CreateItemAuthor(ctx context.Context, arg CreateItemAuthorParams) (ItemAuthor, error) {
row := q.db.QueryRow(ctx, createItemAuthor,
arg.CreatedAt,
arg.UpdatedAt,
arg.DeletedAt,
arg.Name,
arg.Email,
arg.ItemID,
)
var i ItemAuthor
err := row.Scan(
&i.ID,
&i.CreatedAt,
&i.UpdatedAt,
&i.DeletedAt,
&i.Name,
&i.Email,
&i.ItemID,
)
return i, err
}
const createItemDublinCore = `-- name: CreateItemDublinCore :one
INSERT INTO
item_dublin_cores (
created_at,
updated_at,
deleted_at,
title,
creator,
author,
"subject",
"description",
publisher,
contributor,
"date",
"type",
format,
identifier,
source,
"language",
relation,
coverage,
rights,
item_id
)
VALUES
(
$1,
$2,
$3,
$4,
$5,
$6,
$7,
$8,
$9,
$10,
$11,
$12,
$13,
$14,
$15,
$16,
$17,
$18,
$19,
$20
)
RETURNING
id, created_at, updated_at, deleted_at, title, creator, author, subject, description, publisher, contributor, date, type, format, identifier, source, language, relation, coverage, rights, item_id
`
type CreateItemDublinCoreParams struct {
CreatedAt pgtype.Timestamptz `json:"created_at"`
UpdatedAt pgtype.Timestamptz `json:"updated_at"`
DeletedAt pgtype.Timestamptz `json:"deleted_at"`
Title []string `json:"title"`
Creator []string `json:"creator"`
Author []string `json:"author"`
Subject []string `json:"subject"`
Description []string `json:"description"`
Publisher []string `json:"publisher"`
Contributor []string `json:"contributor"`
Date []string `json:"date"`
Type []string `json:"type"`
Format []string `json:"format"`
Identifier []string `json:"identifier"`
Source []string `json:"source"`
Language []string `json:"language"`
Relation []string `json:"relation"`
Coverage []string `json:"coverage"`
Rights []string `json:"rights"`
ItemID int64 `json:"item_id"`
}
func (q *Queries) CreateItemDublinCore(ctx context.Context, arg CreateItemDublinCoreParams) (ItemDublinCore, error) {
row := q.db.QueryRow(ctx, createItemDublinCore,
arg.CreatedAt,
arg.UpdatedAt,
arg.DeletedAt,
arg.Title,
arg.Creator,
arg.Author,
arg.Subject,
arg.Description,
arg.Publisher,
arg.Contributor,
arg.Date,
arg.Type,
arg.Format,
arg.Identifier,
arg.Source,
arg.Language,
arg.Relation,
arg.Coverage,
arg.Rights,
arg.ItemID,
)
var i ItemDublinCore
err := row.Scan(
&i.ID,
&i.CreatedAt,
&i.UpdatedAt,
&i.DeletedAt,
&i.Title,
&i.Creator,
&i.Author,
&i.Subject,
&i.Description,
&i.Publisher,
&i.Contributor,
&i.Date,
&i.Type,
&i.Format,
&i.Identifier,
&i.Source,
&i.Language,
&i.Relation,
&i.Coverage,
&i.Rights,
&i.ItemID,
)
return i, err
}
const createItemExtension = `-- name: CreateItemExtension :one
INSERT INTO
item_extensions (
created_at,
updated_at,
deleted_at,
"name",
"value",
attrs,
children,
item_id
)
VALUES
($1, $2, $3, $4, $5, $6, $7, $8)
RETURNING
id, created_at, updated_at, deleted_at, name, value, attrs, children, item_id
`
type CreateItemExtensionParams struct {
CreatedAt pgtype.Timestamptz `json:"created_at"`
UpdatedAt pgtype.Timestamptz `json:"updated_at"`
DeletedAt pgtype.Timestamptz `json:"deleted_at"`
Name pgtype.Text `json:"name"`
Value pgtype.Text `json:"value"`
Attrs []byte `json:"attrs"`
Children []byte `json:"children"`
ItemID int64 `json:"item_id"`
}
func (q *Queries) CreateItemExtension(ctx context.Context, arg CreateItemExtensionParams) (ItemExtension, error) {
row := q.db.QueryRow(ctx, createItemExtension,
arg.CreatedAt,
arg.UpdatedAt,
arg.DeletedAt,
arg.Name,
arg.Value,
arg.Attrs,
arg.Children,
arg.ItemID,
)
var i ItemExtension
err := row.Scan(
&i.ID,
&i.CreatedAt,
&i.UpdatedAt,
&i.DeletedAt,
&i.Name,
&i.Value,
&i.Attrs,
&i.Children,
&i.ItemID,
)
return i, err
}
const createItemImage = `-- name: CreateItemImage :one
INSERT INTO
item_images (
created_at,
updated_at,
deleted_at,
"url",
title,
item_id
)
VALUES
($1, $2, $3, $4, $5, $6)
RETURNING
id, created_at, updated_at, deleted_at, url, title, item_id
`
type CreateItemImageParams struct {
CreatedAt pgtype.Timestamptz `json:"created_at"`
UpdatedAt pgtype.Timestamptz `json:"updated_at"`
DeletedAt pgtype.Timestamptz `json:"deleted_at"`
Url pgtype.Text `json:"url"`
Title pgtype.Text `json:"title"`
ItemID int64 `json:"item_id"`
}
func (q *Queries) CreateItemImage(ctx context.Context, arg CreateItemImageParams) (ItemImage, error) {
row := q.db.QueryRow(ctx, createItemImage,
arg.CreatedAt,
arg.UpdatedAt,
arg.DeletedAt,
arg.Url,
arg.Title,
arg.ItemID,
)
var i ItemImage
err := row.Scan(
&i.ID,
&i.CreatedAt,
&i.UpdatedAt,
&i.DeletedAt,
&i.Url,
&i.Title,
&i.ItemID,
)
return i, err
}
const createItemItunes = `-- name: CreateItemItunes :one
INSERT INTO
item_itunes (
created_at,
updated_at,
deleted_at,
author,
"block",
"explicit",
keywords,
subtitle,
summary,
"image",
is_closed_captioned,
episode,
season,
"order",
episode_type,
item_id
)
VALUES
(
$1,
$2,
$3,
$4,
$5,
$6,
$7,
$8,
$9,
$10,
$11,
$12,
$13,
$14,
$15,
$16
)
RETURNING
id, created_at, updated_at, deleted_at, author, block, duration, explicit, keywords, subtitle, summary, image, is_closed_captioned, episode, season, "order", episode_type, item_id
`
type CreateItemItunesParams struct {
CreatedAt pgtype.Timestamptz `json:"created_at"`
UpdatedAt pgtype.Timestamptz `json:"updated_at"`
DeletedAt pgtype.Timestamptz `json:"deleted_at"`
Author pgtype.Text `json:"author"`
Block pgtype.Text `json:"block"`
Explicit pgtype.Text `json:"explicit"`
Keywords pgtype.Text `json:"keywords"`
Subtitle pgtype.Text `json:"subtitle"`
Summary pgtype.Text `json:"summary"`
Image pgtype.Text `json:"image"`
IsClosedCaptioned pgtype.Text `json:"is_closed_captioned"`
Episode pgtype.Text `json:"episode"`
Season pgtype.Text `json:"season"`
Order pgtype.Text `json:"order"`
EpisodeType pgtype.Text `json:"episode_type"`
ItemID int64 `json:"item_id"`
}
func (q *Queries) CreateItemItunes(ctx context.Context, arg CreateItemItunesParams) (ItemItune, error) {
row := q.db.QueryRow(ctx, createItemItunes,
arg.CreatedAt,
arg.UpdatedAt,
arg.DeletedAt,
arg.Author,
arg.Block,
arg.Explicit,
arg.Keywords,
arg.Subtitle,
arg.Summary,
arg.Image,
arg.IsClosedCaptioned,
arg.Episode,
arg.Season,
arg.Order,
arg.EpisodeType,
arg.ItemID,
)
var i ItemItune
err := row.Scan(
&i.ID,
&i.CreatedAt,
&i.UpdatedAt,
&i.DeletedAt,
&i.Author,
&i.Block,
&i.Duration,
&i.Explicit,
&i.Keywords,
&i.Subtitle,
&i.Summary,
&i.Image,
&i.IsClosedCaptioned,
&i.Episode,
&i.Season,
&i.Order,
&i.EpisodeType,
&i.ItemID,
)
return i, err
}
const getFeed = `-- name: GetFeed :one
SELECT
id, url, created_at, updated_at, deleted_at, title, description, link, feed_link, links, updated, updated_parsed, published, published_parsed, language, copyright, generator, categories, custom, feed_type, feed_version
FROM
feeds
WHERE
id = $1
`
func (q *Queries) GetFeed(ctx context.Context, id int64) (Feed, error) {
row := q.db.QueryRow(ctx, getFeed, id)
var i Feed
err := row.Scan(
&i.ID,
&i.Url,
&i.CreatedAt,
&i.UpdatedAt,
&i.DeletedAt,
&i.Title,
&i.Description,
&i.Link,
&i.FeedLink,
&i.Links,
&i.Updated,
&i.UpdatedParsed,
&i.Published,
&i.PublishedParsed,
&i.Language,
&i.Copyright,
&i.Generator,
&i.Categories,
&i.Custom,
&i.FeedType,
&i.FeedVersion,
)
return i, err
}
// getFeedAuthors selects a feed's authors, newest first, paginated.
const getFeedAuthors = `-- name: GetFeedAuthors :many
SELECT
    id, created_at, updated_at, deleted_at, name, email, feed_id
FROM
    feed_authors
WHERE
    feed_id = $1
ORDER BY
    created_at DESC
LIMIT
    $2
OFFSET
    $3
`

// GetFeedAuthorsParams are the arguments for GetFeedAuthors.
type GetFeedAuthorsParams struct {
	FeedID int64 `json:"feed_id"`
	Limit  int32 `json:"limit"`
	Offset int32 `json:"offset"`
}

// GetFeedAuthors lists the authors of a feed, newest first, using
// limit/offset pagination.
func (q *Queries) GetFeedAuthors(ctx context.Context, arg GetFeedAuthorsParams) ([]FeedAuthor, error) {
	rows, err := q.db.Query(ctx, getFeedAuthors, arg.FeedID, arg.Limit, arg.Offset)
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	items := []FeedAuthor{}
	for rows.Next() {
		var i FeedAuthor
		if err := rows.Scan(
			&i.ID,
			&i.CreatedAt,
			&i.UpdatedAt,
			&i.DeletedAt,
			&i.Name,
			&i.Email,
			&i.FeedID,
		); err != nil {
			return nil, err
		}
		items = append(items, i)
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return items, nil
}
// getFeedDublinCores selects a feed's Dublin Core rows, newest first, paginated.
const getFeedDublinCores = `-- name: GetFeedDublinCores :many
SELECT
    id, created_at, updated_at, deleted_at, title, creator, author, subject, description, publisher, contributor, date, type, format, identifier, source, language, relation, coverage, rights, feed_id
FROM
    feed_dublin_cores
WHERE
    feed_id = $1
ORDER BY
    created_at DESC
LIMIT
    $2
OFFSET
    $3
`

// GetFeedDublinCoresParams are the arguments for GetFeedDublinCores.
type GetFeedDublinCoresParams struct {
	FeedID int64 `json:"feed_id"`
	Limit  int32 `json:"limit"`
	Offset int32 `json:"offset"`
}

// GetFeedDublinCores lists the Dublin Core metadata rows attached to a feed,
// newest first, using limit/offset pagination.
func (q *Queries) GetFeedDublinCores(ctx context.Context, arg GetFeedDublinCoresParams) ([]FeedDublinCore, error) {
	rows, err := q.db.Query(ctx, getFeedDublinCores, arg.FeedID, arg.Limit, arg.Offset)
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	items := []FeedDublinCore{}
	for rows.Next() {
		var i FeedDublinCore
		if err := rows.Scan(
			&i.ID,
			&i.CreatedAt,
			&i.UpdatedAt,
			&i.DeletedAt,
			&i.Title,
			&i.Creator,
			&i.Author,
			&i.Subject,
			&i.Description,
			&i.Publisher,
			&i.Contributor,
			&i.Date,
			&i.Type,
			&i.Format,
			&i.Identifier,
			&i.Source,
			&i.Language,
			&i.Relation,
			&i.Coverage,
			&i.Rights,
			&i.FeedID,
		); err != nil {
			return nil, err
		}
		items = append(items, i)
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return items, nil
}
// getFeedExtensions selects a feed's XML extension rows, newest first, paginated.
const getFeedExtensions = `-- name: GetFeedExtensions :many
SELECT
    id, created_at, updated_at, deleted_at, name, value, attrs, children, feed_id
FROM
    feed_extensions
WHERE
    feed_id = $1
ORDER BY
    created_at DESC
LIMIT
    $2
OFFSET
    $3
`

// GetFeedExtensionsParams are the arguments for GetFeedExtensions.
type GetFeedExtensionsParams struct {
	FeedID int64 `json:"feed_id"`
	Limit  int32 `json:"limit"`
	Offset int32 `json:"offset"`
}

// GetFeedExtensions lists the custom XML extension elements stored for a
// feed, newest first, using limit/offset pagination.
func (q *Queries) GetFeedExtensions(ctx context.Context, arg GetFeedExtensionsParams) ([]FeedExtension, error) {
	rows, err := q.db.Query(ctx, getFeedExtensions, arg.FeedID, arg.Limit, arg.Offset)
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	items := []FeedExtension{}
	for rows.Next() {
		var i FeedExtension
		if err := rows.Scan(
			&i.ID,
			&i.CreatedAt,
			&i.UpdatedAt,
			&i.DeletedAt,
			&i.Name,
			&i.Value,
			&i.Attrs,
			&i.Children,
			&i.FeedID,
		); err != nil {
			return nil, err
		}
		items = append(items, i)
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return items, nil
}
// getFeedImages selects a feed's image rows, newest first, paginated.
const getFeedImages = `-- name: GetFeedImages :many
SELECT
    id, created_at, updated_at, deleted_at, url, title, feed_id
FROM
    feed_images
WHERE
    feed_id = $1
ORDER BY
    created_at DESC
LIMIT
    $2
OFFSET
    $3
`

// GetFeedImagesParams are the arguments for GetFeedImages.
type GetFeedImagesParams struct {
	FeedID int64 `json:"feed_id"`
	Limit  int32 `json:"limit"`
	Offset int32 `json:"offset"`
}

// GetFeedImages lists the images attached to a feed, newest first, using
// limit/offset pagination.
func (q *Queries) GetFeedImages(ctx context.Context, arg GetFeedImagesParams) ([]FeedImage, error) {
	rows, err := q.db.Query(ctx, getFeedImages, arg.FeedID, arg.Limit, arg.Offset)
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	items := []FeedImage{}
	for rows.Next() {
		var i FeedImage
		if err := rows.Scan(
			&i.ID,
			&i.CreatedAt,
			&i.UpdatedAt,
			&i.DeletedAt,
			&i.Url,
			&i.Title,
			&i.FeedID,
		); err != nil {
			return nil, err
		}
		items = append(items, i)
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return items, nil
}
// getFeedItunes selects the single iTunes extension row for a feed.
const getFeedItunes = `-- name: GetFeedItunes :one
SELECT
    id, created_at, updated_at, deleted_at, author, block, explicit, keywords, subtitle, summary, image, complete, new_feed_url, type, feed_id
FROM
    feed_itunes
WHERE
    feed_id = $1
`

// GetFeedItunes returns the iTunes podcast metadata stored for the feed
// identified by feedID (one row per feed).
func (q *Queries) GetFeedItunes(ctx context.Context, feedID int64) (FeedItune, error) {
	row := q.db.QueryRow(ctx, getFeedItunes, feedID)
	var i FeedItune
	err := row.Scan(
		&i.ID,
		&i.CreatedAt,
		&i.UpdatedAt,
		&i.DeletedAt,
		&i.Author,
		&i.Block,
		&i.Explicit,
		&i.Keywords,
		&i.Subtitle,
		&i.Summary,
		&i.Image,
		&i.Complete,
		&i.NewFeedUrl,
		&i.Type,
		&i.FeedID,
	)
	return i, err
}
// getFeedItunesCategories selects the categories of a feed's iTunes row,
// newest first, paginated.
const getFeedItunesCategories = `-- name: GetFeedItunesCategories :many
SELECT
    id, created_at, updated_at, deleted_at, text, subcategory, itunes_id
FROM
    feed_itunes_categories
WHERE
    itunes_id = $1
ORDER BY
    created_at DESC
LIMIT
    $2
OFFSET
    $3
`

// GetFeedItunesCategoriesParams are the arguments for GetFeedItunesCategories.
// ItunesID is the id of the parent feed_itunes row, not the feed itself.
type GetFeedItunesCategoriesParams struct {
	ItunesID int64 `json:"itunes_id"`
	Limit    int32 `json:"limit"`
	Offset   int32 `json:"offset"`
}

// GetFeedItunesCategories lists the iTunes categories attached to a feed's
// iTunes metadata row, newest first, using limit/offset pagination.
func (q *Queries) GetFeedItunesCategories(ctx context.Context, arg GetFeedItunesCategoriesParams) ([]FeedItunesCategory, error) {
	rows, err := q.db.Query(ctx, getFeedItunesCategories, arg.ItunesID, arg.Limit, arg.Offset)
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	items := []FeedItunesCategory{}
	for rows.Next() {
		var i FeedItunesCategory
		if err := rows.Scan(
			&i.ID,
			&i.CreatedAt,
			&i.UpdatedAt,
			&i.DeletedAt,
			&i.Text,
			&i.Subcategory,
			&i.ItunesID,
		); err != nil {
			return nil, err
		}
		items = append(items, i)
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return items, nil
}
// getFeedItunesOwners selects the owners of a feed's iTunes row, newest
// first, paginated.
const getFeedItunesOwners = `-- name: GetFeedItunesOwners :many
SELECT
    id, created_at, updated_at, deleted_at, email, name, itunes_id
FROM
    feed_itunes_owners
WHERE
    itunes_id = $1
ORDER BY
    created_at DESC
LIMIT
    $2
OFFSET
    $3
`

// GetFeedItunesOwnersParams are the arguments for GetFeedItunesOwners.
// ItunesID is the id of the parent feed_itunes row, not the feed itself.
type GetFeedItunesOwnersParams struct {
	ItunesID int64 `json:"itunes_id"`
	Limit    int32 `json:"limit"`
	Offset   int32 `json:"offset"`
}

// GetFeedItunesOwners lists the iTunes owners attached to a feed's iTunes
// metadata row, newest first, using limit/offset pagination.
func (q *Queries) GetFeedItunesOwners(ctx context.Context, arg GetFeedItunesOwnersParams) ([]FeedItunesOwner, error) {
	rows, err := q.db.Query(ctx, getFeedItunesOwners, arg.ItunesID, arg.Limit, arg.Offset)
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	items := []FeedItunesOwner{}
	for rows.Next() {
		var i FeedItunesOwner
		if err := rows.Scan(
			&i.ID,
			&i.CreatedAt,
			&i.UpdatedAt,
			&i.DeletedAt,
			&i.Email,
			&i.Name,
			&i.ItunesID,
		); err != nil {
			return nil, err
		}
		items = append(items, i)
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return items, nil
}
// getFeeds selects all feeds, newest first, paginated.
const getFeeds = `-- name: GetFeeds :many
SELECT
    id, url, created_at, updated_at, deleted_at, title, description, link, feed_link, links, updated, updated_parsed, published, published_parsed, language, copyright, generator, categories, custom, feed_type, feed_version
FROM
    feeds
ORDER BY
    created_at DESC
LIMIT
    $1
OFFSET
    $2
`

// GetFeedsParams are the pagination arguments for GetFeeds.
type GetFeedsParams struct {
	Limit  int32 `json:"limit"`
	Offset int32 `json:"offset"`
}

// GetFeeds lists feeds, newest first, using limit/offset pagination.
func (q *Queries) GetFeeds(ctx context.Context, arg GetFeedsParams) ([]Feed, error) {
	rows, err := q.db.Query(ctx, getFeeds, arg.Limit, arg.Offset)
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	items := []Feed{}
	for rows.Next() {
		var i Feed
		if err := rows.Scan(
			&i.ID,
			&i.Url,
			&i.CreatedAt,
			&i.UpdatedAt,
			&i.DeletedAt,
			&i.Title,
			&i.Description,
			&i.Link,
			&i.FeedLink,
			&i.Links,
			&i.Updated,
			&i.UpdatedParsed,
			&i.Published,
			&i.PublishedParsed,
			&i.Language,
			&i.Copyright,
			&i.Generator,
			&i.Categories,
			&i.Custom,
			&i.FeedType,
			&i.FeedVersion,
		); err != nil {
			return nil, err
		}
		items = append(items, i)
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return items, nil
}
// getItem selects one item row by primary key.
const getItem = `-- name: GetItem :one
SELECT
    id, created_at, updated_at, deleted_at, title, description, content, link, links, updated, updated_parsed, published, published_parsed, guid, categories, custom, feed_id
FROM
    items
WHERE
    id = $1
`

// GetItem returns the item with the given id.
func (q *Queries) GetItem(ctx context.Context, id int64) (Item, error) {
	row := q.db.QueryRow(ctx, getItem, id)
	var i Item
	err := row.Scan(
		&i.ID,
		&i.CreatedAt,
		&i.UpdatedAt,
		&i.DeletedAt,
		&i.Title,
		&i.Description,
		&i.Content,
		&i.Link,
		&i.Links,
		&i.Updated,
		&i.UpdatedParsed,
		&i.Published,
		&i.PublishedParsed,
		&i.Guid,
		&i.Categories,
		&i.Custom,
		&i.FeedID,
	)
	return i, err
}
// getItemAuthors selects an item's authors, newest first, paginated.
const getItemAuthors = `-- name: GetItemAuthors :many
SELECT
    id, created_at, updated_at, deleted_at, name, email, item_id
FROM
    item_authors
WHERE
    item_id = $1
ORDER BY
    created_at DESC
LIMIT
    $2
OFFSET
    $3
`

// GetItemAuthorsParams are the arguments for GetItemAuthors.
type GetItemAuthorsParams struct {
	ItemID int64 `json:"item_id"`
	Limit  int32 `json:"limit"`
	Offset int32 `json:"offset"`
}

// GetItemAuthors lists the authors of an item, newest first, using
// limit/offset pagination.
func (q *Queries) GetItemAuthors(ctx context.Context, arg GetItemAuthorsParams) ([]ItemAuthor, error) {
	rows, err := q.db.Query(ctx, getItemAuthors, arg.ItemID, arg.Limit, arg.Offset)
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	items := []ItemAuthor{}
	for rows.Next() {
		var i ItemAuthor
		if err := rows.Scan(
			&i.ID,
			&i.CreatedAt,
			&i.UpdatedAt,
			&i.DeletedAt,
			&i.Name,
			&i.Email,
			&i.ItemID,
		); err != nil {
			return nil, err
		}
		items = append(items, i)
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return items, nil
}
// getItemDublinCores selects an item's Dublin Core rows, newest first, paginated.
const getItemDublinCores = `-- name: GetItemDublinCores :many
SELECT
    id, created_at, updated_at, deleted_at, title, creator, author, subject, description, publisher, contributor, date, type, format, identifier, source, language, relation, coverage, rights, item_id
FROM
    item_dublin_cores
WHERE
    item_id = $1
ORDER BY
    created_at DESC
LIMIT
    $2
OFFSET
    $3
`

// GetItemDublinCoresParams are the arguments for GetItemDublinCores.
type GetItemDublinCoresParams struct {
	ItemID int64 `json:"item_id"`
	Limit  int32 `json:"limit"`
	Offset int32 `json:"offset"`
}

// GetItemDublinCores lists the Dublin Core metadata rows attached to an
// item, newest first, using limit/offset pagination.
func (q *Queries) GetItemDublinCores(ctx context.Context, arg GetItemDublinCoresParams) ([]ItemDublinCore, error) {
	rows, err := q.db.Query(ctx, getItemDublinCores, arg.ItemID, arg.Limit, arg.Offset)
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	items := []ItemDublinCore{}
	for rows.Next() {
		var i ItemDublinCore
		if err := rows.Scan(
			&i.ID,
			&i.CreatedAt,
			&i.UpdatedAt,
			&i.DeletedAt,
			&i.Title,
			&i.Creator,
			&i.Author,
			&i.Subject,
			&i.Description,
			&i.Publisher,
			&i.Contributor,
			&i.Date,
			&i.Type,
			&i.Format,
			&i.Identifier,
			&i.Source,
			&i.Language,
			&i.Relation,
			&i.Coverage,
			&i.Rights,
			&i.ItemID,
		); err != nil {
			return nil, err
		}
		items = append(items, i)
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return items, nil
}
// getItemExtensions selects an item's XML extension rows, newest first, paginated.
const getItemExtensions = `-- name: GetItemExtensions :many
SELECT
    id, created_at, updated_at, deleted_at, name, value, attrs, children, item_id
FROM
    item_extensions
WHERE
    item_id = $1
ORDER BY
    created_at DESC
LIMIT
    $2
OFFSET
    $3
`

// GetItemExtensionsParams are the arguments for GetItemExtensions.
type GetItemExtensionsParams struct {
	ItemID int64 `json:"item_id"`
	Limit  int32 `json:"limit"`
	Offset int32 `json:"offset"`
}

// GetItemExtensions lists the custom XML extension elements stored for an
// item, newest first, using limit/offset pagination.
func (q *Queries) GetItemExtensions(ctx context.Context, arg GetItemExtensionsParams) ([]ItemExtension, error) {
	rows, err := q.db.Query(ctx, getItemExtensions, arg.ItemID, arg.Limit, arg.Offset)
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	items := []ItemExtension{}
	for rows.Next() {
		var i ItemExtension
		if err := rows.Scan(
			&i.ID,
			&i.CreatedAt,
			&i.UpdatedAt,
			&i.DeletedAt,
			&i.Name,
			&i.Value,
			&i.Attrs,
			&i.Children,
			&i.ItemID,
		); err != nil {
			return nil, err
		}
		items = append(items, i)
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return items, nil
}
// getItemImages selects an item's image rows, newest first, paginated.
const getItemImages = `-- name: GetItemImages :many
SELECT
    id, created_at, updated_at, deleted_at, url, title, item_id
FROM
    item_images
WHERE
    item_id = $1
ORDER BY
    created_at DESC
LIMIT
    $2
OFFSET
    $3
`

// GetItemImagesParams are the arguments for GetItemImages.
type GetItemImagesParams struct {
	ItemID int64 `json:"item_id"`
	Limit  int32 `json:"limit"`
	Offset int32 `json:"offset"`
}

// GetItemImages lists the images attached to an item, newest first, using
// limit/offset pagination.
func (q *Queries) GetItemImages(ctx context.Context, arg GetItemImagesParams) ([]ItemImage, error) {
	rows, err := q.db.Query(ctx, getItemImages, arg.ItemID, arg.Limit, arg.Offset)
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	items := []ItemImage{}
	for rows.Next() {
		var i ItemImage
		if err := rows.Scan(
			&i.ID,
			&i.CreatedAt,
			&i.UpdatedAt,
			&i.DeletedAt,
			&i.Url,
			&i.Title,
			&i.ItemID,
		); err != nil {
			return nil, err
		}
		items = append(items, i)
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return items, nil
}
// getItemItunes selects the single iTunes extension row for an item.
// "order" is quoted because it is a reserved word in SQL.
const getItemItunes = `-- name: GetItemItunes :one
SELECT
    id, created_at, updated_at, deleted_at, author, block, duration, explicit, keywords, subtitle, summary, image, is_closed_captioned, episode, season, "order", episode_type, item_id
FROM
    item_itunes
WHERE
    item_id = $1
`

// GetItemItunes returns the iTunes episode metadata stored for the item
// identified by itemID (one row per item).
func (q *Queries) GetItemItunes(ctx context.Context, itemID int64) (ItemItune, error) {
	row := q.db.QueryRow(ctx, getItemItunes, itemID)
	var i ItemItune
	err := row.Scan(
		&i.ID,
		&i.CreatedAt,
		&i.UpdatedAt,
		&i.DeletedAt,
		&i.Author,
		&i.Block,
		&i.Duration,
		&i.Explicit,
		&i.Keywords,
		&i.Subtitle,
		&i.Summary,
		&i.Image,
		&i.IsClosedCaptioned,
		&i.Episode,
		&i.Season,
		&i.Order,
		&i.EpisodeType,
		&i.ItemID,
	)
	return i, err
}
// getItems selects the items belonging to a feed, newest first, paginated.
const getItems = `-- name: GetItems :many
SELECT
    id, created_at, updated_at, deleted_at, title, description, content, link, links, updated, updated_parsed, published, published_parsed, guid, categories, custom, feed_id
FROM
    items
WHERE
    feed_id = $1
ORDER BY
    created_at DESC
LIMIT
    $2
OFFSET
    $3
`

// GetItemsParams are the arguments for GetItems.
type GetItemsParams struct {
	FeedID int64 `json:"feed_id"`
	Limit  int32 `json:"limit"`
	Offset int32 `json:"offset"`
}

// GetItems lists the items of a feed, newest first, using limit/offset
// pagination.
func (q *Queries) GetItems(ctx context.Context, arg GetItemsParams) ([]Item, error) {
	rows, err := q.db.Query(ctx, getItems, arg.FeedID, arg.Limit, arg.Offset)
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	items := []Item{}
	for rows.Next() {
		var i Item
		if err := rows.Scan(
			&i.ID,
			&i.CreatedAt,
			&i.UpdatedAt,
			&i.DeletedAt,
			&i.Title,
			&i.Description,
			&i.Content,
			&i.Link,
			&i.Links,
			&i.Updated,
			&i.UpdatedParsed,
			&i.Published,
			&i.PublishedParsed,
			&i.Guid,
			&i.Categories,
			&i.Custom,
			&i.FeedID,
		); err != nil {
			return nil, err
		}
		items = append(items, i)
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return items, nil
}

View file

@ -1,235 +0,0 @@
// Code generated by sqlc. DO NOT EDIT.
// versions:
// sqlc v1.25.0
package db
import (
"github.com/jackc/pgx/v5/pgtype"
)
// Enclosure is a media attachment (audio/video/etc.) belonging to an item.
type Enclosure struct {
	ID        int64              `json:"id"`
	CreatedAt pgtype.Timestamptz `json:"created_at"`
	UpdatedAt pgtype.Timestamptz `json:"updated_at"`
	DeletedAt pgtype.Timestamptz `json:"deleted_at"`
	Url       pgtype.Text        `json:"url"`
	Length    pgtype.Text        `json:"length"`
	Type      pgtype.Text        `json:"type"`
	ItemID    int64              `json:"item_id"`
}

// Feed is one archived RSS/Atom/JSON feed.
type Feed struct {
	ID              int64              `json:"id"`
	Url             string             `json:"url"`
	CreatedAt       pgtype.Timestamptz `json:"created_at"`
	UpdatedAt       pgtype.Timestamptz `json:"updated_at"`
	DeletedAt       pgtype.Timestamptz `json:"deleted_at"`
	Title           pgtype.Text        `json:"title"`
	Description     pgtype.Text        `json:"description"`
	Link            pgtype.Text        `json:"link"`
	FeedLink        pgtype.Text        `json:"feed_link"`
	Links           []string           `json:"links"`
	Updated         pgtype.Text        `json:"updated"`
	UpdatedParsed   pgtype.Timestamptz `json:"updated_parsed"`
	Published       pgtype.Text        `json:"published"`
	PublishedParsed pgtype.Timestamptz `json:"published_parsed"`
	Language        pgtype.Text        `json:"language"`
	Copyright       pgtype.Text        `json:"copyright"`
	Generator       pgtype.Text        `json:"generator"`
	Categories      []string           `json:"categories"`
	Custom          []byte             `json:"custom"`
	FeedType        pgtype.Text        `json:"feed_type"`
	FeedVersion     pgtype.Text        `json:"feed_version"`
}

// FeedAuthor is an author entry attached to a feed.
type FeedAuthor struct {
	ID        int64              `json:"id"`
	CreatedAt pgtype.Timestamptz `json:"created_at"`
	UpdatedAt pgtype.Timestamptz `json:"updated_at"`
	DeletedAt pgtype.Timestamptz `json:"deleted_at"`
	Name      pgtype.Text        `json:"name"`
	Email     pgtype.Text        `json:"email"`
	FeedID    int64              `json:"feed_id"`
}

// FeedDublinCore holds Dublin Core metadata for a feed; every element is a
// string array because DC elements may repeat.
type FeedDublinCore struct {
	ID          int64              `json:"id"`
	CreatedAt   pgtype.Timestamptz `json:"created_at"`
	UpdatedAt   pgtype.Timestamptz `json:"updated_at"`
	DeletedAt   pgtype.Timestamptz `json:"deleted_at"`
	Title       []string           `json:"title"`
	Creator     []string           `json:"creator"`
	Author      []string           `json:"author"`
	Subject     []string           `json:"subject"`
	Description []string           `json:"description"`
	Publisher   []string           `json:"publisher"`
	Contributor []string           `json:"contributor"`
	Date        []string           `json:"date"`
	Type        []string           `json:"type"`
	Format      []string           `json:"format"`
	Identifier  []string           `json:"identifier"`
	Source      []string           `json:"source"`
	Language    []string           `json:"language"`
	Relation    []string           `json:"relation"`
	Coverage    []string           `json:"coverage"`
	Rights      []string           `json:"rights"`
	FeedID      int64              `json:"feed_id"`
}

// FeedExtension is a custom XML extension element found on a feed; Attrs and
// Children are JSON-encoded blobs.
type FeedExtension struct {
	ID        int64              `json:"id"`
	CreatedAt pgtype.Timestamptz `json:"created_at"`
	UpdatedAt pgtype.Timestamptz `json:"updated_at"`
	DeletedAt pgtype.Timestamptz `json:"deleted_at"`
	Name      pgtype.Text        `json:"name"`
	Value     pgtype.Text        `json:"value"`
	Attrs     []byte             `json:"attrs"`
	Children  []byte             `json:"children"`
	FeedID    int64              `json:"feed_id"`
}

// FeedImage is an image attached to a feed.
type FeedImage struct {
	ID        int64              `json:"id"`
	CreatedAt pgtype.Timestamptz `json:"created_at"`
	UpdatedAt pgtype.Timestamptz `json:"updated_at"`
	DeletedAt pgtype.Timestamptz `json:"deleted_at"`
	Url       pgtype.Text        `json:"url"`
	Title     pgtype.Text        `json:"title"`
	FeedID    int64              `json:"feed_id"`
}

// FeedItune is the iTunes podcast metadata for a feed (one row per feed).
type FeedItune struct {
	ID         int64              `json:"id"`
	CreatedAt  pgtype.Timestamptz `json:"created_at"`
	UpdatedAt  pgtype.Timestamptz `json:"updated_at"`
	DeletedAt  pgtype.Timestamptz `json:"deleted_at"`
	Author     pgtype.Text        `json:"author"`
	Block      pgtype.Text        `json:"block"`
	Explicit   pgtype.Text        `json:"explicit"`
	Keywords   pgtype.Text        `json:"keywords"`
	Subtitle   pgtype.Text        `json:"subtitle"`
	Summary    pgtype.Text        `json:"summary"`
	Image      pgtype.Text        `json:"image"`
	Complete   pgtype.Text        `json:"complete"`
	NewFeedUrl pgtype.Text        `json:"new_feed_url"`
	Type       pgtype.Text        `json:"type"`
	FeedID     int64              `json:"feed_id"`
}

// FeedItunesCategory is an iTunes category; ItunesID references the parent
// FeedItune row. Subcategory is an int8 — presumably a self-reference to
// another category row; confirm against the schema.
type FeedItunesCategory struct {
	ID          int64              `json:"id"`
	CreatedAt   pgtype.Timestamptz `json:"created_at"`
	UpdatedAt   pgtype.Timestamptz `json:"updated_at"`
	DeletedAt   pgtype.Timestamptz `json:"deleted_at"`
	Text        pgtype.Text        `json:"text"`
	Subcategory pgtype.Int8        `json:"subcategory"`
	ItunesID    int64              `json:"itunes_id"`
}

// FeedItunesOwner is the iTunes owner contact; ItunesID references the
// parent FeedItune row.
type FeedItunesOwner struct {
	ID        int64              `json:"id"`
	CreatedAt pgtype.Timestamptz `json:"created_at"`
	UpdatedAt pgtype.Timestamptz `json:"updated_at"`
	DeletedAt pgtype.Timestamptz `json:"deleted_at"`
	Email     pgtype.Text        `json:"email"`
	Name      pgtype.Text        `json:"name"`
	ItunesID  int64              `json:"itunes_id"`
}

// Item is a single entry/article belonging to a feed.
type Item struct {
	ID              int64              `json:"id"`
	CreatedAt       pgtype.Timestamptz `json:"created_at"`
	UpdatedAt       pgtype.Timestamptz `json:"updated_at"`
	DeletedAt       pgtype.Timestamptz `json:"deleted_at"`
	Title           pgtype.Text        `json:"title"`
	Description     pgtype.Text        `json:"description"`
	Content         pgtype.Text        `json:"content"`
	Link            pgtype.Text        `json:"link"`
	Links           []string           `json:"links"`
	Updated         pgtype.Text        `json:"updated"`
	UpdatedParsed   pgtype.Timestamptz `json:"updated_parsed"`
	Published       pgtype.Text        `json:"published"`
	PublishedParsed pgtype.Timestamptz `json:"published_parsed"`
	Guid            pgtype.Text        `json:"guid"`
	Categories      []string           `json:"categories"`
	Custom          []byte             `json:"custom"`
	FeedID          int64              `json:"feed_id"`
}

// ItemAuthor is an author entry attached to an item.
type ItemAuthor struct {
	ID        int64              `json:"id"`
	CreatedAt pgtype.Timestamptz `json:"created_at"`
	UpdatedAt pgtype.Timestamptz `json:"updated_at"`
	DeletedAt pgtype.Timestamptz `json:"deleted_at"`
	Name      pgtype.Text        `json:"name"`
	Email     pgtype.Text        `json:"email"`
	ItemID    int64              `json:"item_id"`
}

// ItemDublinCore holds Dublin Core metadata for an item; see FeedDublinCore.
type ItemDublinCore struct {
	ID          int64              `json:"id"`
	CreatedAt   pgtype.Timestamptz `json:"created_at"`
	UpdatedAt   pgtype.Timestamptz `json:"updated_at"`
	DeletedAt   pgtype.Timestamptz `json:"deleted_at"`
	Title       []string           `json:"title"`
	Creator     []string           `json:"creator"`
	Author      []string           `json:"author"`
	Subject     []string           `json:"subject"`
	Description []string           `json:"description"`
	Publisher   []string           `json:"publisher"`
	Contributor []string           `json:"contributor"`
	Date        []string           `json:"date"`
	Type        []string           `json:"type"`
	Format      []string           `json:"format"`
	Identifier  []string           `json:"identifier"`
	Source      []string           `json:"source"`
	Language    []string           `json:"language"`
	Relation    []string           `json:"relation"`
	Coverage    []string           `json:"coverage"`
	Rights      []string           `json:"rights"`
	ItemID      int64              `json:"item_id"`
}

// ItemExtension is a custom XML extension element found on an item; Attrs
// and Children are JSON-encoded blobs.
type ItemExtension struct {
	ID        int64              `json:"id"`
	CreatedAt pgtype.Timestamptz `json:"created_at"`
	UpdatedAt pgtype.Timestamptz `json:"updated_at"`
	DeletedAt pgtype.Timestamptz `json:"deleted_at"`
	Name      pgtype.Text        `json:"name"`
	Value     pgtype.Text        `json:"value"`
	Attrs     []byte             `json:"attrs"`
	Children  []byte             `json:"children"`
	ItemID    int64              `json:"item_id"`
}

// ItemImage is an image attached to an item.
type ItemImage struct {
	ID        int64              `json:"id"`
	CreatedAt pgtype.Timestamptz `json:"created_at"`
	UpdatedAt pgtype.Timestamptz `json:"updated_at"`
	DeletedAt pgtype.Timestamptz `json:"deleted_at"`
	Url       pgtype.Text        `json:"url"`
	Title     pgtype.Text        `json:"title"`
	ItemID    int64              `json:"item_id"`
}

// ItemItune is the iTunes episode metadata for an item (one row per item).
type ItemItune struct {
	ID                int64              `json:"id"`
	CreatedAt         pgtype.Timestamptz `json:"created_at"`
	UpdatedAt         pgtype.Timestamptz `json:"updated_at"`
	DeletedAt         pgtype.Timestamptz `json:"deleted_at"`
	Author            pgtype.Text        `json:"author"`
	Block             pgtype.Text        `json:"block"`
	Duration          pgtype.Text        `json:"duration"`
	Explicit          pgtype.Text        `json:"explicit"`
	Keywords          pgtype.Text        `json:"keywords"`
	Subtitle          pgtype.Text        `json:"subtitle"`
	Summary           pgtype.Text        `json:"summary"`
	Image             pgtype.Text        `json:"image"`
	IsClosedCaptioned pgtype.Text        `json:"is_closed_captioned"`
	Episode           pgtype.Text        `json:"episode"`
	Season            pgtype.Text        `json:"season"`
	Order             pgtype.Text        `json:"order"`
	EpisodeType       pgtype.Text        `json:"episode_type"`
	ItemID            int64              `json:"item_id"`
}

View file

@ -1,22 +0,0 @@
// Code generated by sqlc. DO NOT EDIT.
// versions:
// sqlc v1.25.0
package db
import (
"context"
)
// Querier is the sqlc-generated interface covering the core query methods on
// Queries. Useful as a seam for mocking the data layer in tests.
type Querier interface {
	CountFeeds(ctx context.Context) (int64, error)
	CountItems(ctx context.Context) (int64, error)
	CreateFeed(ctx context.Context, arg CreateFeedParams) (Feed, error)
	CreateItem(ctx context.Context, arg CreateItemParams) (Item, error)
	GetFeed(ctx context.Context, id int64) (Feed, error)
	GetFeeds(ctx context.Context, arg GetFeedsParams) ([]Feed, error)
	GetItem(ctx context.Context, id int64) (Item, error)
	GetItems(ctx context.Context, arg GetItemsParams) ([]Item, error)
}

// Compile-time assertion that *Queries implements Querier.
var _ Querier = (*Queries)(nil)

View file

@ -1,21 +0,0 @@
// Code generated by sqlc. DO NOT EDIT.
// versions:
// sqlc v1.25.0
// source: stats.sql
package db
import (
"context"
)
// dBSize asks Postgres for the current database's size, pretty-printed.
const dBSize = `-- name: DBSize :one
SELECT pg_size_pretty(pg_database_size(current_database()))
`

// DBSize returns the on-disk size of the current database as a
// human-readable string (e.g. "123 MB"), via pg_size_pretty.
func (q *Queries) DBSize(ctx context.Context) (string, error) {
	row := q.db.QueryRow(ctx, dBSize)
	var pg_size_pretty string
	err := row.Scan(&pg_size_pretty)
	return pg_size_pretty, err
}

View file

@ -1,14 +1,70 @@
services:
db:
image: postgres:16
container_name: feedvault-db
restart: unless-stopped
# Django - Web framework
feedvault: &feedvault
container_name: feedvault
image: ghcr.io/thelovinator1/feedvault:latest
user: "1000:1000"
restart: always
networks:
- redis
- db
- web
environment:
POSTGRES_USER: feedvault
POSTGRES_PASSWORD: feedvault # Change this to something more secure in production.
# .vscode/settings.json uses feedvault as the password for goose. https://github.com/pressly/goose
POSTGRES_DB: feedvault
- SECRET_KEY=${SECRET_KEY}
- DEBUG=${DEBUG}
- ADMIN_EMAIL=${ADMIN_EMAIL}
- EMAIL_HOST_USER=${EMAIL_HOST_USER}
- EMAIL_HOST_PASSWORD=${EMAIL_HOST_PASSWORD}
- REDIS_HOST=redis
- REDIS_PASSWORD=${REDIS_PASSWORD}
- POSTGRES_HOST=feedvault_postgres
- POSTGRES_PORT=5432
- POSTGRES_USER=feedvault
- POSTGRES_PASSWORD=${POSTGRES_PASSWORD}
- ADMIN_PAGE_PATH=${ADMIN_PAGE_PATH}
- DISCORD_WEBHOOK_URL=${DISCORD_WEBHOOK_URL}
volumes:
- ./data:/var/lib/postgresql/data
- /mnt/Fourteen/Docker/FeedVault/staticfiles:/app/staticfiles
# Nginx - Reverse proxy
web:
container_name: feedvault_web
image: lscr.io/linuxserver/nginx:latest
restart: always
environment:
- PUID=1000
- PGID=1000
- TZ=Europe/Stockholm
expose:
- 80
- 443
volumes:
- /mnt/Fourteen/Docker/FeedVault/Nginx:/config
networks:
- web
- tunnel
# Postgres - Database
postgres:
container_name: feedvault_postgres
image: postgres:16
user: "1000:1000"
ports:
- "5432:5432"
- 5432:5432
restart: always
environment:
- POSTGRES_USER=feedvault
- POSTGRES_PASSWORD=${POSTGRES_PASSWORD}
- POSTGRES_DB=feedvault
volumes:
- /mnt/Fourteen/Docker/FeedVault/Postgres:/var/lib/postgresql/data
networks:
- db
networks:
tunnel:
driver: bridge
db:
driver: bridge
web:
driver: bridge

View file

@ -1,84 +0,0 @@
package main
import (
"context"
"log"
"time"
"github.com/TheLovinator1/FeedVault/db"
"github.com/jackc/pgx/v5/pgtype"
"github.com/mmcdole/gofeed"
)
// createFeedDublinCore stores the feed's Dublin Core extension (if any) as a
// feed_dublin_cores row linked to newFeed. Errors are logged, not returned,
// so a bad DC block never aborts feed archiving.
func createFeedDublinCore(ctx context.Context, feed *gofeed.Feed, newFeed db.Feed) {
	// TODO: Check if this is correct and works. I can't find a feed that has Dublin Core to test with :-)
	if feed.DublinCoreExt == nil {
		log.Printf("No Dublin Core to add to database")
		return
	}

	now := pgtype.Timestamptz{Time: time.Now(), Valid: true}

	// Add Dublin Core to the database.
	_, err := DB.CreateFeedDublinCore(ctx, db.CreateFeedDublinCoreParams{
		CreatedAt:   now,
		UpdatedAt:   now,
		DeletedAt:   pgtype.Timestamptz{Valid: false},
		Title:       feed.DublinCoreExt.Title,
		Creator:     feed.DublinCoreExt.Creator,
		Author:      feed.DublinCoreExt.Author, // Bug fix: the author column was never populated.
		Subject:     feed.DublinCoreExt.Subject,
		Source:      feed.DublinCoreExt.Source,
		Publisher:   feed.DublinCoreExt.Publisher,
		Contributor: feed.DublinCoreExt.Contributor,
		Description: feed.DublinCoreExt.Description,
		Date:        feed.DublinCoreExt.Date,
		Type:        feed.DublinCoreExt.Type,
		Format:      feed.DublinCoreExt.Format,
		Identifier:  feed.DublinCoreExt.Identifier,
		Language:    feed.DublinCoreExt.Language,
		Relation:    feed.DublinCoreExt.Relation,
		Coverage:    feed.DublinCoreExt.Coverage,
		Rights:      feed.DublinCoreExt.Rights,
		FeedID:      newFeed.ID,
	})
	if err != nil {
		log.Printf("Error adding Dublin Core to database: %s", err)
		return
	}
	log.Printf("Dublin Core added to database")
}
// createItemDublinCore stores the item's Dublin Core extension (if any) as an
// item_dublin_cores row linked to newItem. Errors are logged, not returned,
// so a bad DC block never aborts item archiving.
func createItemDublinCore(ctx context.Context, item *gofeed.Item, newItem db.Item) {
	if item.DublinCoreExt == nil {
		log.Printf("No Dublin Core to add to database")
		return
	}

	now := pgtype.Timestamptz{Time: time.Now(), Valid: true}

	// Add Dublin Core to the database.
	_, err := DB.CreateItemDublinCore(ctx, db.CreateItemDublinCoreParams{
		CreatedAt:   now,
		UpdatedAt:   now,
		DeletedAt:   pgtype.Timestamptz{Valid: false},
		Title:       item.DublinCoreExt.Title,
		Creator:     item.DublinCoreExt.Creator,
		Author:      item.DublinCoreExt.Author, // Bug fix: the author column was never populated.
		Subject:     item.DublinCoreExt.Subject,
		Source:      item.DublinCoreExt.Source,
		Publisher:   item.DublinCoreExt.Publisher,
		Contributor: item.DublinCoreExt.Contributor,
		Description: item.DublinCoreExt.Description,
		Date:        item.DublinCoreExt.Date,
		Type:        item.DublinCoreExt.Type,
		Format:      item.DublinCoreExt.Format,
		Identifier:  item.DublinCoreExt.Identifier,
		Language:    item.DublinCoreExt.Language,
		Relation:    item.DublinCoreExt.Relation,
		Coverage:    item.DublinCoreExt.Coverage,
		Rights:      item.DublinCoreExt.Rights,
		ItemID:      newItem.ID,
	})
	if err != nil {
		log.Printf("Error adding Dublin Core to database: %s", err)
		return
	}
	log.Printf("Dublin Core added to database")
}

View file

@ -1,105 +0,0 @@
package main
import (
"context"
"encoding/json"
"fmt"
"log"
"time"
"github.com/TheLovinator1/FeedVault/db"
"github.com/jackc/pgx/v5/pgtype"
"github.com/mmcdole/gofeed"
)
// addFeedExtensionToDB walks every custom XML extension element attached to
// the feed and stores each one as a feed_extensions row. Attrs/Children are
// serialized to JSON; on a marshal failure the element is still stored with
// an empty "{}" payload. Database errors are logged and skipped.
// TODO: Check if this is correct and works
func addFeedExtensionToDB(ctx context.Context, feed *gofeed.Feed, newFeed db.Feed) {
	for _, namespace := range feed.Extensions {
		for _, elements := range namespace {
			for _, element := range elements {
				attrsJSON := []byte("{}")
				if element.Attrs != nil {
					if encoded, err := json.Marshal(element.Attrs); err == nil {
						attrsJSON = encoded
					} else {
						fmt.Println("Error marshalling extension attributes:", err)
					}
				}

				childrenJSON := []byte("{}")
				if element.Children != nil {
					if encoded, err := json.Marshal(element.Children); err == nil {
						childrenJSON = encoded
					} else {
						fmt.Println("Error marshalling extension children:", err)
					}
				}

				if _, err := DB.CreateFeedExtension(ctx, db.CreateFeedExtensionParams{
					CreatedAt: pgtype.Timestamptz{Time: time.Now(), Valid: true},
					UpdatedAt: pgtype.Timestamptz{Time: time.Now(), Valid: true},
					DeletedAt: pgtype.Timestamptz{Valid: false},
					Name:      pgtype.Text{String: element.Name, Valid: element.Name != ""},
					Value:     pgtype.Text{String: element.Value, Valid: element.Value != ""},
					Attrs:     attrsJSON,
					Children:  childrenJSON,
					FeedID:    newFeed.ID,
				}); err != nil {
					log.Printf("Error adding extension to database: %s", err)
				}
			}
		}
	}
}
// addItemExtensionToDB walks every custom XML extension element attached to
// the item and stores each one as an item_extensions row. Attrs/Children are
// serialized to JSON; on a marshal failure the element is still stored with
// an empty "{}" payload. Database errors are logged and the element skipped.
func addItemExtensionToDB(ctx context.Context, item *gofeed.Item, newItem db.Item) {
	for _, namespace := range item.Extensions {
		for _, elements := range namespace {
			for _, element := range elements {
				attrsJSON := []byte("{}")
				if element.Attrs != nil {
					if encoded, err := json.Marshal(element.Attrs); err == nil {
						attrsJSON = encoded
					} else {
						fmt.Println("Error marshalling extension attributes:", err)
					}
				}

				childrenJSON := []byte("{}")
				if element.Children != nil {
					if encoded, err := json.Marshal(element.Children); err == nil {
						childrenJSON = encoded
					} else {
						fmt.Println("Error marshalling extension children:", err)
					}
				}

				if _, err := DB.CreateItemExtension(ctx, db.CreateItemExtensionParams{
					CreatedAt: pgtype.Timestamptz{Time: time.Now(), Valid: true},
					UpdatedAt: pgtype.Timestamptz{Time: time.Now(), Valid: true},
					DeletedAt: pgtype.Timestamptz{Valid: false},
					Name:      pgtype.Text{String: element.Name, Valid: element.Name != ""},
					Value:     pgtype.Text{String: element.Value, Valid: element.Value != ""},
					Attrs:     attrsJSON,
					Children:  childrenJSON,
					ItemID:    newItem.ID,
				}); err != nil {
					log.Printf("Error adding extension to database: %s", err)
					continue
				}
				log.Printf("Extension added to database")
			}
		}
	}
}

View file

@ -1,67 +0,0 @@
package main
import (
"context"
"fmt"
"log"
"time"
"github.com/mmcdole/gofeed"
)
// AddFeedToDB downloads and parses the feed at feedURL, then archives the
// feed row, its items, and all extension data (authors, images, Dublin Core,
// iTunes) in the database. It returns an error only when fetching/parsing or
// creating the feed row fails; failures in the auxiliary data are logged and
// swallowed so a partial archive still succeeds.
func AddFeedToDB(feedURL string) error {
	// Cancel the fetch/parse if it hasn't finished within 60 seconds.
	ctx, cancel := context.WithTimeout(context.Background(), 60*time.Second)
	defer cancel()

	// Parse the feed.
	fp := gofeed.NewParser()
	fp.UserAgent = "FeedVault/1.0 (RSS feed archive; https://feedvault.se; bot@feedvault.se; TheLovinator#9276)"
	feed, err := fp.ParseURLWithContext(feedURL, ctx)
	if err != nil {
		// Wrap with %w so callers can inspect the underlying parse error.
		return fmt.Errorf("parsing feed %q: %w", feedURL, err)
	}

	// Add the feed to the database.
	newFeed, err := DB.CreateFeed(ctx, makeCreateFeedParams(feedURL, feed))
	if err != nil {
		return fmt.Errorf("adding feed %q to database: %w", feedURL, err)
	}
	log.Printf("Feed added to database")

	// Add the items to the database.
	for _, item := range feed.Items {
		addItemToDB(item, ctx, newFeed)
	}

	// Add extensions to the database.
	log.Printf("Adding extensions to the database")
	addFeedExtensionToDB(ctx, feed, newFeed)

	// Add authors to the database.
	log.Printf("Adding authors to the database")
	addFeedAuthors(ctx, feed, newFeed)

	// TODO: Add categories to the database

	// Add images to the database.
	log.Printf("Adding images to the database")
	addFeedImages(ctx, feed, newFeed)

	// Add Dublin Core to the database.
	log.Printf("Adding Dublin Core to the database")
	createFeedDublinCore(ctx, feed, newFeed)

	// Add iTunes extensions to the database.
	log.Printf("Adding iTunes extensions to the database")
	itunes, err := createFeedItunes(ctx, feed, newFeed)
	if err != nil {
		log.Printf("Error adding iTunes extensions to database: %s", err)
	}
	// NOTE(review): on error, `itunes` may be a zero value, so the two calls
	// below would link categories/owners to itunes_id 0 — confirm whether
	// createFeedItunes returns a usable value alongside its error.
	createFeedItunesCategories(ctx, feed, itunes)
	createFeedItunesOwners(ctx, feed, itunes)

	log.Printf("Feed added to database")
	return nil
}

0
feeds/__init__.py Normal file
View file

1
feeds/admin.py Normal file
View file

@ -0,0 +1 @@
# Register your models here.

8
feeds/apps.py Normal file
View file

@ -0,0 +1,8 @@
from django.apps import AppConfig
class FeedsConfig(AppConfig):
    """This Django app is responsible for managing the feeds."""

    # Use 64-bit auto-incrementing primary keys for models in this app.
    default_auto_field = "django.db.models.BigAutoField"
    # App label Django uses for registration, migrations, and reverse lookups.
    name = "feeds"

View file

@ -0,0 +1,116 @@
# Generated by Django 5.0.2 on 2024-02-18 20:59
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated initial migration (Django 5.0.2).

    Creates the Domain, Feed and Entry tables. Field order and names must
    stay in sync with feeds/models.py; edit with care, since Django's
    migration autodetector diffs future migrations against this file.
    """

    initial = True

    # First migration for the app, so nothing to depend on.
    dependencies = [
    ]

    operations = [
        # Domain: a website that owns one or more feeds.
        migrations.CreateModel(
            name='Domain',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=255, unique=True)),
                ('url', models.URLField()),
                ('categories', models.JSONField()),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('modified_at', models.DateTimeField(auto_now=True)),
                ('hidden', models.BooleanField(default=False)),
                ('hidden_at', models.DateTimeField(blank=True, null=True)),
                ('hidden_reason', models.TextField(blank=True)),
            ],
        ),
        # Feed: one RSS/Atom/JSON feed; columns mirror feedparser output.
        migrations.CreateModel(
            name='Feed',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('modified_at', models.DateTimeField(auto_now=True)),
                ('bozo', models.BooleanField()),
                ('bozo_exception', models.TextField()),
                ('encoding', models.TextField()),
                ('etag', models.TextField()),
                ('headers', models.JSONField()),
                ('href', models.TextField()),
                ('modified', models.DateTimeField()),
                ('namespaces', models.JSONField()),
                ('status', models.IntegerField()),
                ('version', models.CharField(max_length=50)),
                ('author', models.TextField()),
                ('author_detail', models.JSONField()),
                ('cloud', models.JSONField()),
                ('contributors', models.JSONField()),
                ('docs', models.TextField()),
                ('errorreportsto', models.TextField()),
                ('generator', models.TextField()),
                ('generator_detail', models.TextField()),
                ('icon', models.TextField()),
                # Named _id to avoid clashing with Django's implicit "id" pk.
                ('_id', models.TextField()),
                ('image', models.JSONField()),
                ('info', models.TextField()),
                ('info_detail', models.JSONField()),
                ('language', models.TextField()),
                ('license', models.TextField()),
                ('link', models.TextField()),
                ('links', models.JSONField()),
                ('logo', models.TextField()),
                ('published', models.TextField()),
                ('published_parsed', models.DateTimeField()),
                ('publisher', models.TextField()),
                ('publisher_detail', models.JSONField()),
                ('rights', models.TextField()),
                ('rights_detail', models.JSONField()),
                ('subtitle', models.TextField()),
                ('subtitle_detail', models.JSONField()),
                ('tags', models.JSONField()),
                ('textinput', models.JSONField()),
                ('title', models.TextField()),
                ('title_detail', models.JSONField()),
                ('ttl', models.TextField()),
                ('updated', models.TextField()),
                ('updated_parsed', models.DateTimeField()),
                # Deleting a Domain cascades to its feeds.
                ('domain', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='feeds.domain')),
            ],
        ),
        # Entry: a single item/article within a feed.
        migrations.CreateModel(
            name='Entry',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('modified_at', models.DateTimeField(auto_now=True)),
                ('author', models.TextField()),
                ('author_detail', models.JSONField()),
                ('comments', models.TextField()),
                ('content', models.JSONField()),
                ('contributors', models.JSONField()),
                ('created', models.TextField()),
                ('created_parsed', models.DateTimeField()),
                ('enclosures', models.JSONField()),
                ('expired', models.TextField()),
                ('expired_parsed', models.DateTimeField()),
                # Named _id to avoid clashing with Django's implicit "id" pk.
                ('_id', models.TextField()),
                ('license', models.TextField()),
                ('link', models.TextField()),
                ('links', models.JSONField()),
                ('published', models.TextField()),
                ('published_parsed', models.DateTimeField()),
                ('publisher', models.TextField()),
                ('publisher_detail', models.JSONField()),
                ('source', models.JSONField()),
                ('summary', models.TextField()),
                ('summary_detail', models.JSONField()),
                ('tags', models.JSONField()),
                ('title', models.TextField()),
                ('title_detail', models.JSONField()),
                ('updated', models.TextField()),
                ('updated_parsed', models.DateTimeField()),
                # Deleting a Feed cascades to its entries.
                ('feed', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='feeds.feed')),
            ],
        ),
    ]

View file

122
feeds/models.py Normal file
View file

@ -0,0 +1,122 @@
from __future__ import annotations
from typing import Literal
from django.db import models
from django.db.models import JSONField
class Domain(models.Model):
    """A domain that has one or more feeds."""

    name = models.CharField(max_length=255, unique=True)
    url = models.URLField()
    categories = models.JSONField()
    created_at = models.DateTimeField(auto_now_add=True)
    modified_at = models.DateTimeField(auto_now=True)
    hidden = models.BooleanField(default=False)
    hidden_at = models.DateTimeField(null=True, blank=True)
    hidden_reason = models.TextField(blank=True)

    def __str__(self) -> str:
        """Return the domain name, suffixed with " (hidden)" when hidden."""
        suffix = " (hidden)" if self.hidden else ""
        return f"{self.name}{suffix}"
class Feed(models.Model):
    """A RSS/Atom/JSON feed.

    Column names mirror the raw feedparser result for the feed, so most
    fields are free-form text/JSON rather than normalized data.
    """

    # Owning website; deleting the Domain deletes its feeds.
    domain = models.ForeignKey(Domain, on_delete=models.CASCADE)
    created_at = models.DateTimeField(auto_now_add=True)
    modified_at = models.DateTimeField(auto_now=True)

    # General data (parser-level metadata)
    bozo = models.BooleanField()
    bozo_exception = models.TextField()
    encoding = models.TextField()
    etag = models.TextField()
    headers = JSONField()
    href = models.TextField()
    modified = models.DateTimeField()
    namespaces = JSONField()
    status = models.IntegerField()
    version = models.CharField(max_length=50)

    # Feed data (channel-level fields from the feed itself)
    author = models.TextField()
    author_detail = JSONField()
    cloud = JSONField()
    contributors = JSONField()
    docs = models.TextField()
    errorreportsto = models.TextField()
    generator = models.TextField()
    generator_detail = models.TextField()
    icon = models.TextField()
    # Leading underscore avoids clashing with Django's implicit "id" pk.
    _id = models.TextField()
    image = JSONField()
    info = models.TextField()
    info_detail = JSONField()
    language = models.TextField()
    license = models.TextField()
    link = models.TextField()
    links = JSONField()
    logo = models.TextField()
    published = models.TextField()
    published_parsed = models.DateTimeField()
    publisher = models.TextField()
    publisher_detail = JSONField()
    rights = models.TextField()
    rights_detail = JSONField()
    subtitle = models.TextField()
    subtitle_detail = JSONField()
    tags = JSONField()
    textinput = JSONField()
    title = models.TextField()
    title_detail = JSONField()
    ttl = models.TextField()
    updated = models.TextField()
    updated_parsed = models.DateTimeField()

    def __str__(self) -> str:
        """Return the feed title, or "No title" when none is available.

        title_detail is raw parser JSON; indexing it with ["value"] raised
        KeyError/TypeError when the payload was None or lacked a "value"
        key, so look the key up defensively instead.
        """
        detail = self.title_detail if isinstance(self.title_detail, dict) else {}
        return detail.get("value") or "No title"
class Entry(models.Model):
    """Each feed has multiple entries.

    Column names mirror the raw feedparser entry payload.
    """

    # Parent feed; deleting the Feed deletes its entries.
    feed = models.ForeignKey(Feed, on_delete=models.CASCADE)
    created_at = models.DateTimeField(auto_now_add=True)
    modified_at = models.DateTimeField(auto_now=True)

    # Entry data (item-level fields from the feed)
    author = models.TextField()
    author_detail = JSONField()
    comments = models.TextField()
    content = JSONField()
    contributors = JSONField()
    created = models.TextField()
    created_parsed = models.DateTimeField()
    enclosures = JSONField()
    expired = models.TextField()
    expired_parsed = models.DateTimeField()
    # Leading underscore avoids clashing with Django's implicit "id" pk.
    _id = models.TextField()
    license = models.TextField()
    link = models.TextField()
    links = JSONField()
    published = models.TextField()
    published_parsed = models.DateTimeField()
    publisher = models.TextField()
    publisher_detail = JSONField()
    source = JSONField()
    summary = models.TextField()
    summary_detail = JSONField()
    tags = JSONField()
    title = models.TextField()
    title_detail = JSONField()
    updated = models.TextField()
    updated_parsed = models.DateTimeField()

    def __str__(self) -> str:
        """Return the entry title, or "No title" when none is available.

        title_detail is raw parser JSON; indexing it with ["value"] raised
        KeyError/TypeError when the payload was None or lacked a "value"
        key, so look the key up defensively instead.
        """
        detail = self.title_detail if isinstance(self.title_detail, dict) else {}
        return detail.get("value") or "No title"

1
feeds/tests.py Normal file
View file

@ -0,0 +1 @@
# Create your tests here.

13
feeds/urls.py Normal file
View file

@ -0,0 +1,13 @@
from __future__ import annotations

from django.urls import URLPattern, path

from feeds import views

# URL namespace, e.g. reverse("feeds:index").
app_name: str = "feeds"

urlpatterns: list[URLPattern] = [
    # Landing page with the add-feed form.
    path(route="", view=views.IndexView.as_view(), name="index"),
    # Detail page for one archived feed.
    path(route="feed/<int:feed_id>/", view=views.FeedView.as_view(), name="feed"),
    # Listing of all archived feeds.
    path(route="feeds/", view=views.FeedsView.as_view(), name="feeds"),
]

35
feeds/views.py Normal file
View file

@ -0,0 +1,35 @@
from __future__ import annotations
from django.http import HttpRequest, HttpResponse
from django.template import loader
from django.views import View
class IndexView(View):
    """Index path."""

    def get(self, request: HttpRequest) -> HttpResponse:
        """GET request for index path.

        Renders the static index.html template with an empty context.
        """
        template = loader.get_template(template_name="index.html")
        context = {}  # No template variables needed yet.
        return HttpResponse(content=template.render(context=context, request=request))
class FeedView(View):
    """A single feed."""

    def get(self, request: HttpRequest, feed_id: int) -> HttpResponse:
        """GET request for a single feed.

        feed_id comes from the URL pattern and is passed straight to the
        template; no database lookup happens here yet.
        """
        template = loader.get_template(template_name="feed.html")
        context = {"feed_id": feed_id}
        return HttpResponse(content=template.render(context=context, request=request))
class FeedsView(View):
    """All feeds."""

    def get(self, request: HttpRequest) -> HttpResponse:
        """GET request for the feed listing.

        Renders the static feeds.html template with an empty context.
        """
        template = loader.get_template(template_name="feeds.html")
        context = {}  # No template variables needed yet.
        return HttpResponse(content=template.render(context=context, request=request))

Binary file not shown.

0
feedvault/__init__.py Normal file
View file

15
feedvault/asgi.py Normal file
View file

@ -0,0 +1,15 @@
"""ASGI config for feedvault project.
It exposes the ASGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/5.0/howto/deployment/asgi/
"""
import os
from django.core.asgi import get_asgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "feedvault.settings")
application = get_asgi_application()

151
feedvault/settings.py Normal file
View file

@ -0,0 +1,151 @@
from __future__ import annotations
import os
from pathlib import Path
from dotenv import find_dotenv, load_dotenv
load_dotenv(dotenv_path=find_dotenv(), verbose=True)
# Run Django in debug mode
DEBUG: bool = os.getenv(key="DEBUG", default="True").lower() == "true"
BASE_DIR: Path = Path(__file__).resolve().parent.parent
# The secret key is used for cryptographic signing, and should be set to a unique, unpredictable value.
SECRET_KEY: str = os.getenv("SECRET_KEY", default="")
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
INSTALLED_APPS = [
"feeds.apps.FeedsConfig",
"django.contrib.admin",
"django.contrib.auth",
"django.contrib.contenttypes",
"django.contrib.sessions",
"django.contrib.messages",
"django.contrib.staticfiles",
]
MIDDLEWARE = [
"django.middleware.security.SecurityMiddleware",
"django.contrib.sessions.middleware.SessionMiddleware",
"django.middleware.common.CommonMiddleware",
"django.middleware.csrf.CsrfViewMiddleware",
"django.contrib.auth.middleware.AuthenticationMiddleware",
"django.contrib.messages.middleware.MessageMiddleware",
"django.middleware.clickjacking.XFrameOptionsMiddleware",
]
ROOT_URLCONF = "feedvault.urls"
TEMPLATES = [
{
"BACKEND": "django.template.backends.django.DjangoTemplates",
"DIRS": [],
"APP_DIRS": True,
"OPTIONS": {
"context_processors": [
"django.template.context_processors.debug",
"django.template.context_processors.request",
"django.contrib.auth.context_processors.auth",
"django.contrib.messages.context_processors.messages",
],
},
},
]
WSGI_APPLICATION = "feedvault.wsgi.application"
# Database
# https://docs.djangoproject.com/en/5.0/ref/settings/#databases
DATABASES: dict[str, dict[str, str]] = {
"default": {
"ENGINE": "django.db.backends.postgresql",
"NAME": "feedvault",
"USER": os.getenv(key="POSTGRES_USER", default=""),
"PASSWORD": os.getenv(key="POSTGRES_PASSWORD", default=""),
"HOST": os.getenv(key="POSTGRES_HOST", default=""),
"PORT": os.getenv(key="POSTGRES_PORT", default="5432"),
},
}
# Password validation
# https://docs.djangoproject.com/en/5.0/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
"NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator",
},
{
"NAME": "django.contrib.auth.password_validation.MinimumLengthValidator",
},
{
"NAME": "django.contrib.auth.password_validation.CommonPasswordValidator",
},
{
"NAME": "django.contrib.auth.password_validation.NumericPasswordValidator",
},
]
# A list of all the people who get code error notifications. When DEBUG=False and a view raises an exception, Django
ADMINS: list[tuple[str, str]] = [("Joakim Hellsén", "django@feedvault.se")]
# A list of strings representing the host/domain names that this Django site can serve.
# .feedvault.se will match *.feedvault.se and feedvault.se
ALLOWED_HOSTS: list[str] = [".feedvault.se", ".localhost", "127.0.0.1"]
# The time zone that Django will use to display datetimes in templates and to interpret datetimes entered in forms
TIME_ZONE = "Europe/Stockholm"
# If datetimes will be timezone-aware by default. If True, Django will use timezone-aware datetimes internally.
USE_TZ = True
# Don't use Django's translation system
USE_I18N = False
# Decides which translation is served to all users.
LANGUAGE_CODE = "en-us"
# Default decimal separator used when formatting decimal numbers.
DECIMAL_SEPARATOR = ","
# Use a space as the thousand separator instead of a comma
THOUSAND_SEPARATOR = " "
# Use gmail for sending emails
EMAIL_HOST = "smtp.gmail.com"
EMAIL_PORT = 587
EMAIL_USE_TLS = True
EMAIL_HOST_USER: str = os.getenv(key="EMAIL_HOST_USER", default="webmaster@localhost")
EMAIL_HOST_PASSWORD: str = os.getenv(key="EMAIL_HOST_PASSWORD", default="")
EMAIL_SUBJECT_PREFIX = "[FeedVault] "
EMAIL_USE_LOCALTIME = True
EMAIL_TIMEOUT = 10
DEFAULT_FROM_EMAIL: str = os.getenv(key="EMAIL_HOST_USER", default="webmaster@localhost")
SERVER_EMAIL: str = os.getenv(key="EMAIL_HOST_USER", default="webmaster@localhost")
# Use the X-Forwarded-Host header
USE_X_FORWARDED_HOST = True
# Set the Referrer Policy HTTP header on all responses that do not already have one.
SECURE_REFERRER_POLICY = "strict-origin-when-cross-origin"
# Internal IPs that are allowed to see debug views
INTERNAL_IPS: list[str] = ["127.0.0.1", "localhost"]
STATIC_URL = "static/"
STATIC_ROOT: Path = BASE_DIR / "staticfiles"
STATICFILES_DIRS: list[Path] = [BASE_DIR / "static"]
DEFAULT_AUTO_FIELD = "django.db.models.BigAutoField"
# Our site ID
SITE_ID = 1

6
feedvault/urls.py Normal file
View file

@ -0,0 +1,6 @@
from django.contrib import admin
from django.urls import path

# Project-level URL configuration; currently only the admin site is routed.
urlpatterns = [
    path("admin/", admin.site.urls),
]

8
feedvault/wsgi.py Normal file
View file

@ -0,0 +1,8 @@
import os
from django.core.handlers.wsgi import WSGIHandler
from django.core.wsgi import get_wsgi_application
os.environ.setdefault(key="DJANGO_SETTINGS_MODULE", value="feedvault.settings")
application: WSGIHandler = get_wsgi_application()

25
go.mod
View file

@ -1,25 +0,0 @@
module github.com/TheLovinator1/FeedVault
go 1.22.0
require (
github.com/jackc/pgx/v5 v5.5.3
github.com/joho/godotenv v1.5.1
github.com/mmcdole/gofeed v1.2.1
)
require (
github.com/PuerkitoBio/goquery v1.8.0 // indirect
github.com/andybalholm/cascadia v1.3.1 // indirect
github.com/jackc/pgpassfile v1.0.0 // indirect
github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a // indirect
github.com/jackc/puddle/v2 v2.2.1 // indirect
github.com/json-iterator/go v1.1.12 // indirect
github.com/mmcdole/goxpp v1.1.0 // indirect
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
github.com/modern-go/reflect2 v1.0.2 // indirect
golang.org/x/crypto v0.17.0 // indirect
golang.org/x/net v0.10.0 // indirect
golang.org/x/sync v0.1.0 // indirect
golang.org/x/text v0.14.0 // indirect
)

54
go.sum
View file

@ -1,54 +0,0 @@
github.com/PuerkitoBio/goquery v1.8.0 h1:PJTF7AmFCFKk1N6V6jmKfrNH9tV5pNE6lZMkG0gta/U=
github.com/PuerkitoBio/goquery v1.8.0/go.mod h1:ypIiRMtY7COPGk+I/YbZLbxsxn9g5ejnI2HSMtkjZvI=
github.com/andybalholm/cascadia v1.3.1 h1:nhxRkql1kdYCc8Snf7D5/D3spOX+dBgjA6u8x004T2c=
github.com/andybalholm/cascadia v1.3.1/go.mod h1:R4bJ1UQfqADjvDa4P6HZHLh/3OxWWEqc0Sk8XGwHqvA=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
github.com/jackc/pgpassfile v1.0.0 h1:/6Hmqy13Ss2zCq62VdNG8tM1wchn8zjSGOBJ6icpsIM=
github.com/jackc/pgpassfile v1.0.0/go.mod h1:CEx0iS5ambNFdcRtxPj5JhEz+xB6uRky5eyVu/W2HEg=
github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a h1:bbPeKD0xmW/Y25WS6cokEszi5g+S0QxI/d45PkRi7Nk=
github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a/go.mod h1:5TJZWKEWniPve33vlWYSoGYefn3gLQRzjfDlhSJ9ZKM=
github.com/jackc/pgx/v5 v5.5.3 h1:Ces6/M3wbDXYpM8JyyPD57ivTtJACFZJd885pdIaV2s=
github.com/jackc/pgx/v5 v5.5.3/go.mod h1:ez9gk+OAat140fv9ErkZDYFWmXLfV+++K0uAOiwgm1A=
github.com/jackc/puddle/v2 v2.2.1 h1:RhxXJtFG022u4ibrCSMSiu5aOq1i77R3OHKNJj77OAk=
github.com/jackc/puddle/v2 v2.2.1/go.mod h1:vriiEXHvEE654aYKXXjOvZM39qJ0q+azkZFrfEOc3H4=
github.com/joho/godotenv v1.5.1 h1:7eLL/+HRGLY0ldzfGMeQkb7vMd0as4CfYvUVzLqw0N0=
github.com/joho/godotenv v1.5.1/go.mod h1:f4LDr5Voq0i2e/R5DDNOoa2zzDfwtkZa6DnEwAbqwq4=
github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM=
github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo=
github.com/mmcdole/gofeed v1.2.1 h1:tPbFN+mfOLcM1kDF1x2c/N68ChbdBatkppdzf/vDe1s=
github.com/mmcdole/gofeed v1.2.1/go.mod h1:2wVInNpgmC85q16QTTuwbuKxtKkHLCDDtf0dCmnrNr4=
github.com/mmcdole/goxpp v1.1.0 h1:WwslZNF7KNAXTFuzRtn/OKZxFLJAAyOA9w82mDz2ZGI=
github.com/mmcdole/goxpp v1.1.0/go.mod h1:v+25+lT2ViuQ7mVxcncQ8ch1URund48oH+jhjiwEgS8=
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg=
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M=
github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.8.1 h1:w7B6lhMri9wdJUVmEZPGGhZzrYTPvgJArz7wNPgYKsk=
github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
golang.org/x/crypto v0.17.0 h1:r8bRNjWL3GshPW3gkd+RpvzWrZAwPS49OmTGZ/uhM4k=
golang.org/x/crypto v0.17.0/go.mod h1:gCAAfMLgwOJRpTjQ2zCCt2OcSfYMTeZVSRtQlPC7Nq4=
golang.org/x/net v0.0.0-20210916014120-12bc252f5db8/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
golang.org/x/net v0.10.0 h1:X2//UzNDwYmtCLn7To6G58Wr6f5ahEAQgKNzv9Y951M=
golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg=
golang.org/x/sync v0.1.0 h1:wsuoTGHzEhffawBOhz5CYhcrV4IdKZbEyZjBMuTp12o=
golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ=
golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=

View file

@ -1,490 +0,0 @@
package main
import (
"context"
"io"
"log"
"net/http"
"strconv"
"strings"
"github.com/TheLovinator1/FeedVault/db"
)
func ApiHandler(w http.ResponseWriter, _ *http.Request) {
htmlData := HTMLData{
Title: "FeedVault API",
Description: "FeedVault API - A feed archive",
Keywords: "RSS, Atom, Feed, Archive, API",
Author: "TheLovinator",
CanonicalURL: "http://localhost:8000/api",
Content: "<p>Here be dragons.</p>",
}
html := FullHTML(htmlData)
_, err := w.Write([]byte(html))
if err != nil {
log.Println("Error writing response:", err)
}
}
// FeedsHandler renders a page listing every archived feed together with its
// authors, generic extensions, iTunes metadata and images.
func FeedsHandler(w http.ResponseWriter, _ *http.Request) {
	ctx := context.Background()

	feeds, err := DB.GetFeeds(ctx, db.GetFeedsParams{
		Limit: 100,
	})
	if err != nil {
		// Previously this error was only logged and an empty page was
		// rendered; surface the failure to the client instead.
		log.Println("Error getting feeds:", err)
		http.Error(w, "Error getting feeds", http.StatusInternalServerError)
		return
	}

	fb := strings.Builder{}
	for _, feed := range feeds {
		authors, err := DB.GetFeedAuthors(ctx, db.GetFeedAuthorsParams{
			FeedID: feed.ID,
			Limit:  100,
		})
		if err != nil {
			// BUG FIX: the original called http.Error but kept looping and
			// later wrote the full page, corrupting the response. Abort here.
			http.Error(w, "Error getting authors", http.StatusInternalServerError)
			return
		}

		extensions, err := DB.GetFeedExtensions(ctx, db.GetFeedExtensionsParams{
			FeedID: feed.ID,
			Limit:  100,
		})
		if err != nil {
			// Best-effort: render the feed without extensions.
			log.Println("Error getting extensions:", err)
		}

		// Get the itunes extensions (best-effort; zero value renders nothing).
		itunes, err := DB.GetFeedItunes(ctx, feed.ID)
		if err != nil {
			log.Println("Error getting itunes extensions:", err)
		}

		fb.WriteString("<li>")
		fb.WriteString(feed.Title.String)
		fb.WriteString("<ul>")
		for _, author := range authors {
			if author.Name.Valid {
				fb.WriteString("<li>Author: " + author.Name.String + "</li>")
			}
			if author.Email.Valid {
				fb.WriteString("<li>Email: " + author.Email.String + "</li>")
			}
		}
		for _, ext := range extensions {
			if ext.Attrs != nil {
				fb.WriteString("<li>Attrs: " + string(ext.Attrs) + "</li>")
			}
			if ext.Children != nil {
				fb.WriteString("<li>Children: " + string(ext.Children) + "</li>")
			}
			if ext.Name.Valid {
				fb.WriteString("<li>Name: " + ext.Name.String + "</li>")
			}
			if ext.Value.Valid {
				fb.WriteString("<li>Value: " + ext.Value.String + "</li>")
			}
		}

		// Itunes extensions
		fb.WriteString("<ul>")
		if itunes.Author.Valid {
			fb.WriteString("<li>Itunes Author: " + itunes.Author.String + "</li>")
		}
		if itunes.Block.Valid {
			fb.WriteString("<li>Itunes Block: " + itunes.Block.String + "</li>")
		}
		if itunes.Explicit.Valid {
			fb.WriteString("<li>Itunes Explicit: " + itunes.Explicit.String + "</li>")
		}
		if itunes.Image.Valid {
			fb.WriteString("<li>Itunes Image: " + itunes.Image.String + "</li>")
		}
		if itunes.Keywords.Valid {
			fb.WriteString("<li>Itunes Keywords: " + itunes.Keywords.String + "</li>")
		}
		if itunes.Subtitle.Valid {
			fb.WriteString("<li>Itunes Subtitle: " + itunes.Subtitle.String + "</li>")
		}
		if itunes.Summary.Valid {
			fb.WriteString("<li>Itunes Summary: " + itunes.Summary.String + "</li>")
		}
		if itunes.Type.Valid {
			fb.WriteString("<li>Itunes Type: " + itunes.Type.String + "</li>")
		}
		fb.WriteString("</ul>")

		images, err := DB.GetFeedImages(ctx, db.GetFeedImagesParams{
			FeedID: feed.ID,
			Limit:  100,
		})
		if err != nil {
			log.Println("Error getting images:", err)
			continue
		}
		for _, image := range images {
			fb.WriteString("<li><img src=\"" + image.Url.String + "\" alt=\"Feed Image\" width=\"256\"></li>")
		}
		fb.WriteString("</ul>")
		fb.WriteString("<a href=\"/feed/" + strconv.FormatInt(feed.ID, 10) + "\">" + feed.Url + "</a>")
		fb.WriteString("</li>")
	}

	htmlData := HTMLData{
		Title:        "FeedVault Feeds",
		Description:  "FeedVault Feeds - A feed archive",
		Keywords:     "RSS, Atom, Feed, Archive",
		Author:       "TheLovinator",
		CanonicalURL: "http://localhost:8000/feeds",
		Content:      "<ul>" + fb.String() + "</ul>",
	}
	html := FullHTML(htmlData)
	if _, err := w.Write([]byte(html)); err != nil {
		log.Println("Error writing response:", err)
	}
}
// AddFeedHandler accepts a newline-separated list of feed URLs from the add
// form, validates each one, stores it in the database, and renders a result
// page listing the per-URL outcome.
func AddFeedHandler(w http.ResponseWriter, r *http.Request) {
	// One ParseResult per submitted URL (success or failure).
	var parseErrors []ParseResult

	// Parse the form and get the URLs
	if err := r.ParseForm(); err != nil {
		http.Error(w, "Error parsing form", http.StatusInternalServerError)
		return
	}

	urls := r.Form.Get("urls")
	if urls == "" {
		http.Error(w, "No URLs provided", http.StatusBadRequest)
		return
	}

	for _, feedURL := range strings.Split(urls, "\n") {
		// BUG FIX: browsers submit textarea content with CRLF line endings, so
		// splitting on "\n" alone left a trailing "\r" on every URL. Trim
		// surrounding whitespace and skip blank lines.
		feedURL = strings.TrimSpace(feedURL)
		if feedURL == "" {
			continue
		}

		// TODO: Try to upgrade to https if http is provided

		// Validate the URL
		if err := ValidateFeedURL(feedURL); err != nil {
			parseErrors = append(parseErrors, ParseResult{FeedURL: feedURL, Msg: err.Error(), IsError: true})
			continue
		}

		if err := AddFeedToDB(feedURL); err != nil {
			parseErrors = append(parseErrors, ParseResult{FeedURL: feedURL, Msg: err.Error(), IsError: true})
			continue
		}

		// Feed was added successfully
		parseErrors = append(parseErrors, ParseResult{FeedURL: feedURL, Msg: "Added", IsError: false})
	}

	htmlData := HTMLData{
		Title:        "FeedVault",
		Description:  "FeedVault - A feed archive",
		Keywords:     "RSS, Atom, Feed, Archive",
		Author:       "TheLovinator",
		CanonicalURL: "http://localhost:8000/",
		Content:      "<p>Feeds added.</p>",
		ParseResult:  parseErrors,
	}
	html := FullHTML(htmlData)
	if _, err := w.Write([]byte(html)); err != nil {
		log.Println("Error writing response:", err)
	}
}
// IndexHandler renders the landing page: the add-feed form, the OPML upload
// form, and a static FAQ section.
func IndexHandler(w http.ResponseWriter, _ *http.Request) {
	content := `<h2>Feeds to archive</h2>
<p>
Input the URLs of the feeds you wish to archive below. You can add as many as needed, and access them through the website or API. Alternatively, include links to .opml files, and the feeds within will be archived.
</p>
<form action="/add" method="post">
<textarea id="urls" name="urls" rows="5" cols="50" required></textarea>
<button type="submit">Add feeds</button>
</form>
<br>
<p>You can also upload .opml files containing the feeds you wish to archive:</p>
<form enctype="multipart/form-data" method="post" action="/upload_opml">
<input type="file" name="file" id="file" accept=".opml" required>
<button type="submit">Upload OPML</button>
</form>
`
	// NOTE: two FAQ sentences were missing the word "feeds." ("archives web It
	// allows", "to parse The service"); fixed below.
	FAQ := `
<h2>FAQ</h2>
<details>
<summary>What are web feeds?</summary>
<p>
Web feeds are a way to distribute content on the web. They allow users to access updates from websites without having to visit them directly. Feeds are typically used for news websites, blogs, and other sites that frequently update content.
<br>
You can read more about web feeds on <a href="https://en.wikipedia.org/wiki/Web_feed">Wikipedia</a>.
</p>
<hr>
</details>
<details>
<summary>What is FeedVault?</summary>
<p>
FeedVault is a service that archives web feeds. It allows users to access and search for historical content from various websites. The service is designed to preserve the history of the web and provide a reliable source for accessing content that may no longer be available on the original websites.
</p>
<hr>
</details>
<details>
<summary>Why archive feeds?</summary>
<p>
Web feeds are a valuable source of information, and archiving them ensures that the content is preserved for future reference. By archiving feeds, we can ensure that historical content is available for research, analysis, and other purposes. Additionally, archiving feeds can help prevent the loss of valuable information due to website changes, outages, or other issues.
</p>
<hr>
</details>
<details>
<summary>How does it work?</summary>
<p>
FeedVault is written in Go and uses the <a href="https://github.com/mmcdole/gofeed">gofeed</a> library to parse feeds. The service periodically checks for new content in the feeds and stores it in a database. Users can access the archived feeds through the website or API.
<hr>
</details>
<details>
<summary>How can I access the archived feeds?</summary>
<p>
You can access the archived feeds through the website or API. The website provides a user interface for searching and browsing the feeds, while the API allows you to access the feeds programmatically. You can also download the feeds in various formats, such as JSON, XML, or RSS.
</p>
</details>
`
	content += FAQ

	htmlData := HTMLData{
		Title:        "FeedVault",
		Description:  "FeedVault - A feed archive",
		Keywords:     "RSS, Atom, Feed, Archive",
		Author:       "TheLovinator",
		CanonicalURL: "http://localhost:8000/",
		Content:      content,
	}
	html := FullHTML(htmlData)
	if _, err := w.Write([]byte(html)); err != nil {
		log.Println("Error writing response:", err)
	}
}
// UploadOpmlHandler accepts an uploaded .opml file, extracts the feed URLs it
// references, validates and stores each one, and renders a per-URL result page.
func UploadOpmlHandler(w http.ResponseWriter, r *http.Request) {
	// Parse the form and get the file
	if err := r.ParseMultipartForm(10 << 20); err != nil { // 10 MB upload cap
		http.Error(w, "Error parsing form", http.StatusInternalServerError)
		return
	}

	file, _, err := r.FormFile("file")
	if err != nil {
		http.Error(w, "No file provided", http.StatusBadRequest)
		return
	}
	defer file.Close()

	// Read the file
	all, err := io.ReadAll(file)
	if err != nil {
		http.Error(w, "Failed to read file", http.StatusInternalServerError)
		return
	}

	// Parse the OPML file
	parseResult := []ParseResult{}
	links, err := ParseOpml(string(all))
	if err != nil {
		parseResult = append(parseResult, ParseResult{FeedURL: "/upload_opml", Msg: err.Error(), IsError: true})
	} else {
		// Add the feeds to the database
		for _, feedURL := range links.XMLLinks {
			log.Println("Adding feed:", feedURL)

			// Validate the URL
			if err := ValidateFeedURL(feedURL); err != nil {
				parseResult = append(parseResult, ParseResult{FeedURL: feedURL, Msg: err.Error(), IsError: true})
				continue
			}

			// BUG FIX: the original reported "Added" without ever storing the
			// feed; persist it like AddFeedHandler does.
			if err := AddFeedToDB(feedURL); err != nil {
				parseResult = append(parseResult, ParseResult{FeedURL: feedURL, Msg: err.Error(), IsError: true})
				continue
			}

			parseResult = append(parseResult, ParseResult{FeedURL: feedURL, Msg: "Added", IsError: false})
		}
	}

	// Return the results
	htmlData := HTMLData{
		Title:        "FeedVault",
		Description:  "FeedVault - A feed archive",
		Keywords:     "RSS, Atom, Feed, Archive",
		Author:       "TheLovinator",
		CanonicalURL: "http://localhost:8000/",
		Content:      "<p>Feeds added.</p>",
		ParseResult:  parseResult,
	}
	html := FullHTML(htmlData)
	if _, err := w.Write([]byte(html)); err != nil {
		log.Println("Error writing response:", err)
	}
}
// FeedHandler renders a single archived feed and its items as an HTML page.
//
// The feed ID is taken from the URL path (/feed/{id}). It responds with
// 400 for a missing or non-numeric ID and 500 for database errors.
//
// NOTE(review): feed/item values are interpolated into HTML without
// escaping — if feed content is untrusted this is an XSS risk; consider
// html.EscapeString. TODO confirm the trust model for archived feeds.
func FeedHandler(w http.ResponseWriter, r *http.Request) {
	// Get the feed ID from the URL, e.g. /feed/123 -> "123".
	parts := strings.Split(r.URL.Path, "/")
	if len(parts) < 3 {
		http.Error(w, "No feed ID provided", http.StatusBadRequest)
		return
	}
	feedID, err := strconv.ParseInt(parts[2], 10, 64)
	if err != nil {
		http.Error(w, "Invalid feed ID", http.StatusBadRequest)
		return
	}

	// Get the feed from the database.
	feed, err := DB.GetFeed(context.Background(), feedID)
	if err != nil {
		http.Error(w, "Error getting feed", http.StatusInternalServerError)
		return
	}

	// Get the items for the feed. Bug fix: the original only logged this
	// error and silently rendered an empty feed; fail the request instead,
	// consistent with the other database errors in this handler.
	items, err := DB.GetItems(context.Background(), db.GetItemsParams{
		FeedID: feedID,
		Limit:  100,
	})
	if err != nil {
		log.Println("Error getting items:", err)
		http.Error(w, "Error getting items", http.StatusInternalServerError)
		return
	}

	// Build the HTML list of items.
	var fb strings.Builder
	for _, item := range items {
		// Get authors for the item.
		authors, err := DB.GetItemAuthors(context.Background(), db.GetItemAuthorsParams{
			ItemID: item.ID,
			Limit:  100,
		})
		if err != nil {
			http.Error(w, "Error getting authors", http.StatusInternalServerError)
			return
		}
		// Get extensions for the item.
		extensions, err := DB.GetItemExtensions(context.Background(), db.GetItemExtensionsParams{
			ItemID: item.ID,
			Limit:  100,
		})
		if err != nil {
			http.Error(w, "Error getting extensions", http.StatusInternalServerError)
			return
		}
		fb.WriteString("<li>")
		fb.WriteString("<a href=\"" + item.Link.String + "\">" + item.Title.String + "</a>")
		fb.WriteString("<ul>")
		for _, author := range authors {
			fb.WriteString("<li>Author: " + author.Name.String + "</li>")
		}
		for _, ext := range extensions {
			fb.WriteString("<ul>")
			fb.WriteString("<li>Extension: " + ext.Name.String + "</li>")
			if ext.Value.Valid {
				// NOTE(review): label says "Name" but prints the extension
				// value — looks like a copy-paste; confirm intended label.
				fb.WriteString("<li>Name: " + ext.Value.String + "</li>")
			}
			if ext.Attrs != nil {
				fb.WriteString("<li>Attrs: " + string(ext.Attrs) + "</li>")
			}
			if ext.Children != nil {
				fb.WriteString("<li>Children: " + string(ext.Children) + "</li>")
			}
			fb.WriteString("</ul>")
		}
		// Get images for the item.
		images, err := DB.GetItemImages(context.Background(), db.GetItemImagesParams{
			ItemID: item.ID,
			Limit:  100,
		})
		if err != nil {
			http.Error(w, "Error getting images", http.StatusInternalServerError)
			return
		}
		for _, image := range images {
			fb.WriteString("<li><img src=\"" + image.Url.String + "\" alt=\"Feed Image\" width=\"256\"></li>")
		}
		fb.WriteString("</ul>")
		fb.WriteString("<ul>")
		if item.Published.Valid {
			fb.WriteString("<li>Published: " + item.Published.String + "</li>")
		}
		if item.Updated.Valid {
			fb.WriteString("<li>Updated: " + item.Updated.String + "</li>")
		}
		if item.Description.Valid {
			fb.WriteString("<li>" + item.Description.String + "</li>")
		}
		if item.Content.Valid {
			fb.WriteString("<li>" + item.Content.String + "</li>")
		}
		fb.WriteString("</ul>")
		// iTunes extensions. Bug fixes: the original emitted this section
		// after the closing </li> (outside the list item), and rendered a
		// zero-value section even when the lookup failed; now it stays
		// inside the <li> and is skipped on error.
		itunes, err := DB.GetItemItunes(context.Background(), item.ID)
		if err != nil {
			log.Println("Error getting itunes extensions:", err)
		} else {
			fb.WriteString("<ul>")
			if itunes.Author.Valid {
				fb.WriteString("<li>Itunes Author: " + itunes.Author.String + "</li>")
			}
			if itunes.Block.Valid {
				fb.WriteString("<li>Itunes Block: " + itunes.Block.String + "</li>")
			}
			if itunes.Duration.Valid {
				fb.WriteString("<li>Itunes Duration: " + itunes.Duration.String + "</li>")
			}
			if itunes.Explicit.Valid {
				fb.WriteString("<li>Itunes Explicit: " + itunes.Explicit.String + "</li>")
			}
			if itunes.Image.Valid {
				fb.WriteString("<li>Itunes Image: " + itunes.Image.String + "</li>")
			}
			if itunes.Keywords.Valid {
				fb.WriteString("<li>Itunes Keywords: " + itunes.Keywords.String + "</li>")
			}
			if itunes.Subtitle.Valid {
				fb.WriteString("<li>Itunes Subtitle: " + itunes.Subtitle.String + "</li>")
			}
			if itunes.Summary.Valid {
				fb.WriteString("<li>Itunes Summary: " + itunes.Summary.String + "</li>")
			}
			fb.WriteString("</ul>")
		}
		fb.WriteString("<hr>")
		fb.WriteString("</li>")
	}

	htmlData := HTMLData{
		Title:        feed.Title.String,
		Description:  feed.Description.String,
		Keywords:     "RSS, Atom, Feed, Archive",
		Author:       "TheLovinator",
		CanonicalURL: "http://localhost:8000/feed/" + strconv.FormatInt(feed.ID, 10),
		Content:      "<ul>" + fb.String() + "</ul>",
	}
	html := FullHTML(htmlData)
	_, err = w.Write([]byte(html))
	if err != nil {
		log.Println("Error writing response:", err)
	}
}

View file

@ -1,68 +0,0 @@
package main
import (
"net/http"
"net/http/httptest"
"strings"
"testing"
)
// TestIndexHandler checks that the index page answers 200 OK and
// contains the intro text shown above the add-feed form.
func TestIndexHandler(t *testing.T) {
	req, err := http.NewRequest("GET", "/", nil)
	if err != nil {
		t.Fatal(err)
	}

	// Record the response by invoking the handler directly.
	rec := httptest.NewRecorder()
	http.HandlerFunc(IndexHandler).ServeHTTP(rec, req)

	// Status must be 200 OK.
	if rec.Code != http.StatusOK {
		t.Errorf("handler returned wrong status code: got %v want %v",
			rec.Code, http.StatusOK)
	}

	// Body must contain the expected intro string.
	want := "Input the URLs of the feeds you wish to archive below. You can add as many as needed, and access them through the website or API. Alternatively, include links to .opml files, and the feeds within will be archived."
	if got := rec.Body.String(); !strings.Contains(got, want) {
		t.Errorf("handler returned unexpected body: got %v want %v",
			got, want)
	}
}
// TestApiHandler checks that the API landing page answers 200 OK and
// contains its placeholder text.
func TestApiHandler(t *testing.T) {
	req, err := http.NewRequest("GET", "/api", nil)
	if err != nil {
		t.Fatal(err)
	}

	// Record the response by invoking the handler directly.
	rec := httptest.NewRecorder()
	http.HandlerFunc(ApiHandler).ServeHTTP(rec, req)

	// Status must be 200 OK.
	if rec.Code != http.StatusOK {
		t.Errorf("handler returned wrong status code: got %v want %v",
			rec.Code, http.StatusOK)
	}

	// Body must contain the expected placeholder string.
	want := "Here be dragons."
	if got := rec.Body.String(); !strings.Contains(got, want) {
		t.Errorf("handler returned unexpected body: got %v want %v",
			got, want)
	}
}

193
html.go
View file

@ -1,193 +0,0 @@
package main
import (
"context"
"fmt"
"log"
"math/rand"
"strings"
)
// ParseResult is the outcome of trying to add a single feed. It is used
// for the success/error message list at the top of the page after adding
// a feed.
type ParseResult struct {
	FeedURL string // the URL that was submitted
	Msg     string // human-readable success or error text
	IsError bool   // true when adding the feed failed
}
// HTMLData is the data passed to the HTML template when rendering a page.
type HTMLData struct {
	Title        string        // page <title>
	Description  string        // <meta name="description"> content
	Keywords     string        // <meta name="keywords"> content
	Author       string        // <meta name="author"> content
	CanonicalURL string        // canonical link for this page
	Content      string        // HTML injected into <main>
	ParseResult  []ParseResult // optional add-feed results shown at the top of the page
}
// style is the CSS that is inlined into the <style> tag of every
// rendered page (see htmlTemplate).
var style = `
html {
max-width: 70ch;
padding: calc(1vmin + 0.5rem);
margin-inline: auto;
font-size: clamp(1em, 0.909em + 0.45vmin, 1.25em);
font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, Helvetica, Arial, sans-serif;
color-scheme: light dark;
}
h1 {
font-size: 2.5rem;
font-weight: 600;
margin: 0;
}
.title {
text-align: center;
}
.search {
display: flex;
justify-content: center;
margin-top: 1rem;
margin-inline: auto;
}
.leftright {
display: flex;
justify-content: center;
}
.left {
margin-right: auto;
}
.right {
margin-left: auto;
}
textarea {
width: 100%;
height: 10rem;
resize: vertical;
}
.messages {
list-style-type: none;
}
.error {
color: red;
}
.success {
color: green;
}
`
const (
	// errorListItem is shown after adding a feed. It shows if error or success.
	// Format args, in order: CSS class ("success" or "error"), link href,
	// link text, message.
	errorListItem = `<li class="%s"><a href="%s">%s</a> - %s</li>`
	// htmlTemplate is the HTML template for the entire page.
	// Format args, in order: description, keywords, author, canonical URL,
	// title, CSS, status messages, feed count (%d), database size, main
	// content, footer message (see FullHTML).
	htmlTemplate = `<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<meta name="description" content="%s">
<meta name="keywords" content="%s">
<meta name="author" content="%s">
<link rel="canonical" href="%s">
<title>%s</title>
<style>%s</style>
</head>
<body>
%s
<span class="title"><h1><a href="/">FeedVault</a></h1></span>
<div class="leftright">
<div class="left">
<small>Archive of <a href="https://en.wikipedia.org/wiki/Web_feed">web feeds</a>. %d feeds. ~%s.</small>
</div>
<div class="right">
<form action="#" method="get">
<input type="text" name="q" placeholder="Search">
<button type="submit">Search</button>
</form>
</div>
</div>
<nav>
<small>
<div class="leftright">
<div class="left">
<a href="/">Home</a> | <a href="/feeds">Feeds</a> | <a href="/api">API</a>
</div>
<div class="right">
<a href="https://github.com/TheLovinator1/FeedVault">GitHub</a> | <a href="https://github.com/sponsors/TheLovinator1">Donate</a>
</div>
</div>
</small>
</nav>
<hr>
<main>
%s
</main>
<hr>
<footer>
<small>
<div class="leftright">
<div class="left">
Made by <a href="">Joakim Hellsén</a>.
</div>
<div class="right">No rights reserved.</div>
</div>
<div class="leftright">
<div class="left">
<a href="mailto:hello@feedvault.se">hello@feedvault.se</a>
</div>
<div class="right">
%s
</div>
</div>
</small>
</footer>
</body>
</html>`
)
// buildErrorList renders the per-feed success/error messages as an HTML
// list. It returns the empty string when there are no results.
func buildErrorList(parseResults []ParseResult) string {
	if len(parseResults) == 0 {
		return ""
	}
	var b strings.Builder
	b.WriteString("<ul>")
	for _, res := range parseResults {
		// Green list item for success, red for error.
		cls := "success"
		if res.IsError {
			cls = "error"
		}
		b.WriteString(fmt.Sprintf(errorListItem, cls, res.FeedURL, res.FeedURL, res.Msg))
	}
	b.WriteString("</ul>")
	return b.String()
}
// FullHTML renders the complete HTML page for the given page data.
//
// Bug fix: on a CountFeeds error the original called log.Fatalf and then
// had an unreachable `feedCount = 0` — Fatalf exits the process, so the
// intended fallback never ran. It now logs and falls back to 0, matching
// the GetDBSize error handling below.
func FullHTML(h HTMLData) string {
	statusMsg := buildErrorList(h.ParseResult)

	// Total number of archived feeds, shown in the header; 0 on error.
	feedCount, err := DB.CountFeeds(context.Background())
	if err != nil {
		log.Println("Error counting feeds:", err)
		feedCount = 0
	}

	// Approximate database size, shown in the header; "0 KiB" on error.
	databaseSize, err := GetDBSize()
	if err != nil {
		databaseSize = "0 KiB"
		log.Println("Error getting database size:", err)
	}

	// Random footer message.
	funMsg := FunMsg[rand.Intn(len(FunMsg))]
	return fmt.Sprintf(htmlTemplate, h.Description, h.Keywords, h.Author, h.CanonicalURL, h.Title, style, statusMsg, feedCount, databaseSize, h.Content, funMsg)
}

View file

@ -1,27 +0,0 @@
package main
import (
"strings"
"testing"
)
// Displays error messages if there are any parse errors
func TestErrorMessages(t *testing.T) {
// Initialize test data
parseResult := []ParseResult{
{IsError: true, Msg: "Error 1"},
{IsError: true, Msg: "Error 2"},
}
h := HTMLData{
ParseResult: parseResult,
}
// Invoke function under test
result := FullHTML(h)
// Assert that the result contains the error messages
if !strings.Contains(result, "Error 1") || !strings.Contains(result, "Error 2") {
t.Errorf("Expected error messages, but got: %s", result)
}
}

View file

@ -1,49 +0,0 @@
package main
import (
"context"
"log"
"time"
"github.com/TheLovinator1/FeedVault/db"
"github.com/jackc/pgx/v5/pgtype"
"github.com/mmcdole/gofeed"
)
// addFeedImages persists the feed-level image for newFeed, when the
// parsed feed has one.
// TODO: Download the image and store it on the server.
func addFeedImages(ctx context.Context, feed *gofeed.Feed, newFeed db.Feed) {
	img := feed.Image
	if img == nil {
		log.Printf("No image to add to database")
		return
	}
	params := db.CreateFeedImageParams{
		CreatedAt: pgtype.Timestamptz{Time: time.Now(), Valid: true},
		UpdatedAt: pgtype.Timestamptz{Time: time.Now(), Valid: true},
		DeletedAt: pgtype.Timestamptz{Valid: false},
		Url:       pgtype.Text{String: img.URL, Valid: img.URL != ""},
		Title:     pgtype.Text{String: img.Title, Valid: img.Title != ""},
		FeedID:    newFeed.ID,
	}
	if _, err := DB.CreateFeedImage(ctx, params); err != nil {
		log.Printf("Error adding image to database: %s", err)
		return
	}
	log.Printf("Image added to database: %s", img.URL)
}
// addItemImages persists the item-level image for newItem, when the
// parsed item has one.
//
// Bug fix: the original dereferenced item.Image without a nil check and
// panicked for items without an image; addFeedImages already guards the
// equivalent case for feeds.
func addItemImages(ctx context.Context, item *gofeed.Item, newItem db.Item) {
	if item.Image == nil {
		log.Printf("No image to add to database")
		return
	}
	_, err := DB.CreateItemImage(ctx, db.CreateItemImageParams{
		CreatedAt: pgtype.Timestamptz{Time: time.Now(), Valid: true},
		UpdatedAt: pgtype.Timestamptz{Time: time.Now(), Valid: true},
		DeletedAt: pgtype.Timestamptz{Valid: false},
		Url:       pgtype.Text{String: item.Image.URL, Valid: item.Image.URL != ""},
		Title:     pgtype.Text{String: item.Image.Title, Valid: item.Image.Title != ""},
		ItemID:    newItem.ID,
	})
	if err != nil {
		log.Printf("Error adding image to database: %s", err)
		return
	}
	log.Printf("Image added to database: %s", item.Image.URL)
}

138
itunes.go
View file

@ -1,138 +0,0 @@
package main
import (
"context"
"log"
"time"
"github.com/TheLovinator1/FeedVault/db"
"github.com/jackc/pgx/v5/pgtype"
"github.com/mmcdole/gofeed"
)
// createFeedItunes stores the feed-level iTunes extension for newFeed.
// Returns the zero db.FeedItune with a nil error when the feed has no
// iTunes extension.
func createFeedItunes(ctx context.Context, feed *gofeed.Feed, newFeed db.Feed) (db.FeedItune, error) {
	if feed.ITunesExt == nil {
		log.Printf("No iTunes extensions to add to database")
		return db.FeedItune{}, nil
	}
	// Use a single timestamp so CreatedAt and UpdatedAt are identical on
	// insert (the original called time.Now() per field).
	now := time.Now()
	// Add iTunes extensions to the database.
	itunesID, err := DB.CreateFeedItunes(ctx, db.CreateFeedItunesParams{
		CreatedAt:  pgtype.Timestamptz{Time: now, Valid: true},
		UpdatedAt:  pgtype.Timestamptz{Time: now, Valid: true},
		DeletedAt:  pgtype.Timestamptz{Valid: false},
		Author:     pgtype.Text{String: feed.ITunesExt.Author, Valid: feed.ITunesExt.Author != ""},
		Block:      pgtype.Text{String: feed.ITunesExt.Block, Valid: feed.ITunesExt.Block != ""},
		Explicit:   pgtype.Text{String: feed.ITunesExt.Explicit, Valid: feed.ITunesExt.Explicit != ""},
		Keywords:   pgtype.Text{String: feed.ITunesExt.Keywords, Valid: feed.ITunesExt.Keywords != ""},
		Subtitle:   pgtype.Text{String: feed.ITunesExt.Subtitle, Valid: feed.ITunesExt.Subtitle != ""},
		Summary:    pgtype.Text{String: feed.ITunesExt.Summary, Valid: feed.ITunesExt.Summary != ""},
		Image:      pgtype.Text{String: feed.ITunesExt.Image, Valid: feed.ITunesExt.Image != ""},
		Complete:   pgtype.Text{String: feed.ITunesExt.Complete, Valid: feed.ITunesExt.Complete != ""},
		NewFeedUrl: pgtype.Text{String: feed.ITunesExt.NewFeedURL, Valid: feed.ITunesExt.NewFeedURL != ""},
		Type:       pgtype.Text{String: feed.ITunesExt.Type, Valid: feed.ITunesExt.Type != ""},
		FeedID:     newFeed.ID,
	})
	if err != nil {
		log.Printf("Error adding iTunes extensions to database: %s", err)
		return db.FeedItune{}, err
	}
	log.Printf("iTunes extensions added to database")
	return itunesID, nil
}
// createItemItunes stores the item-level iTunes extension for newItem.
// Returns the zero db.ItemItune with a nil error when the item has no
// iTunes extension.
func createItemItunes(ctx context.Context, item *gofeed.Item, newItem db.Item) (db.ItemItune, error) {
	if item.ITunesExt == nil {
		log.Printf("No iTunes extensions to add to database")
		return db.ItemItune{}, nil
	}
	// Use a single timestamp so CreatedAt and UpdatedAt are identical on
	// insert (the original called time.Now() per field).
	now := time.Now()
	// Add iTunes extensions to the database.
	itunesID, err := DB.CreateItemItunes(ctx, db.CreateItemItunesParams{
		CreatedAt:         pgtype.Timestamptz{Time: now, Valid: true},
		UpdatedAt:         pgtype.Timestamptz{Time: now, Valid: true},
		DeletedAt:         pgtype.Timestamptz{Valid: false},
		Author:            pgtype.Text{String: item.ITunesExt.Author, Valid: item.ITunesExt.Author != ""},
		Block:             pgtype.Text{String: item.ITunesExt.Block, Valid: item.ITunesExt.Block != ""},
		Explicit:          pgtype.Text{String: item.ITunesExt.Explicit, Valid: item.ITunesExt.Explicit != ""},
		Keywords:          pgtype.Text{String: item.ITunesExt.Keywords, Valid: item.ITunesExt.Keywords != ""},
		Subtitle:          pgtype.Text{String: item.ITunesExt.Subtitle, Valid: item.ITunesExt.Subtitle != ""},
		Summary:           pgtype.Text{String: item.ITunesExt.Summary, Valid: item.ITunesExt.Summary != ""},
		Image:             pgtype.Text{String: item.ITunesExt.Image, Valid: item.ITunesExt.Image != ""},
		IsClosedCaptioned: pgtype.Text{String: item.ITunesExt.IsClosedCaptioned, Valid: item.ITunesExt.IsClosedCaptioned != ""},
		Episode:           pgtype.Text{String: item.ITunesExt.Episode, Valid: item.ITunesExt.Episode != ""},
		Season:            pgtype.Text{String: item.ITunesExt.Season, Valid: item.ITunesExt.Season != ""},
		Order:             pgtype.Text{String: item.ITunesExt.Order, Valid: item.ITunesExt.Order != ""},
		EpisodeType:       pgtype.Text{String: item.ITunesExt.EpisodeType, Valid: item.ITunesExt.EpisodeType != ""},
		ItemID:            newItem.ID,
	})
	if err != nil {
		log.Printf("Error adding iTunes extensions to database: %s", err)
		return db.ItemItune{}, err
	}
	log.Printf("iTunes extensions added to database")
	return itunesID, nil
}
// createFeedItunesCategories stores the feed's iTunes categories (and one
// level of subcategories, which is all gofeed exposes) linked to itunes.ID.
//
// Bug fix: the subcategory success log printed cat.Text (the parent
// category) instead of cat.Subcategory.Text.
func createFeedItunesCategories(ctx context.Context, feed *gofeed.Feed, itunes db.FeedItune) {
	if feed.ITunesExt == nil {
		log.Printf("No iTunes categories to add to database")
		return
	}
	// Single timestamp so every row from this call carries the same
	// CreatedAt/UpdatedAt instant.
	now := time.Now()
	for _, cat := range feed.ITunesExt.Categories {
		newCat, err := DB.CreateFeedItunesCategory(ctx, db.CreateFeedItunesCategoryParams{
			CreatedAt: pgtype.Timestamptz{Time: now, Valid: true},
			UpdatedAt: pgtype.Timestamptz{Time: now, Valid: true},
			DeletedAt: pgtype.Timestamptz{Valid: false},
			Text:      pgtype.Text{String: cat.Text, Valid: cat.Text != ""}, // 🐈 meow
			ItunesID:  itunes.ID,
		})
		if err != nil {
			log.Printf("Error adding iTunes category to database: %s", err)
			continue
		}
		log.Printf("iTunes category added to database: %s", cat.Text)
		// Add the subcategory, referencing the parent row just created.
		if cat.Subcategory != nil {
			_, err = DB.CreateFeedItunesCategory(ctx, db.CreateFeedItunesCategoryParams{
				CreatedAt:   pgtype.Timestamptz{Time: now, Valid: true},
				UpdatedAt:   pgtype.Timestamptz{Time: now, Valid: true},
				DeletedAt:   pgtype.Timestamptz{Valid: false},
				Text:        pgtype.Text{String: cat.Subcategory.Text, Valid: cat.Subcategory.Text != ""}, // 🐈 meow
				ItunesID:    itunes.ID,
				Subcategory: pgtype.Int8{Int64: newCat.ID, Valid: true},
			})
			if err != nil {
				log.Printf("Error adding iTunes subcategory to database: %s", err)
				continue
			}
			log.Printf("iTunes subcategory added to database: %s", cat.Subcategory.Text)
		}
	}
}
// createFeedItunesOwners stores the feed's iTunes owner (name/email)
// linked to itunes.ID. No-op when the feed has no iTunes extension or
// no owner.
func createFeedItunesOwners(ctx context.Context, feed *gofeed.Feed, itunes db.FeedItune) {
	if feed.ITunesExt == nil {
		log.Printf("No iTunes owners to add to database")
		return
	}
	if feed.ITunesExt.Owner == nil {
		log.Printf("No iTunes owner to add to database")
		return
	}
	// Single timestamp so CreatedAt and UpdatedAt are identical on insert.
	now := time.Now()
	_, err := DB.CreateFeedItunesOwner(ctx, db.CreateFeedItunesOwnerParams{
		CreatedAt: pgtype.Timestamptz{Time: now, Valid: true},
		UpdatedAt: pgtype.Timestamptz{Time: now, Valid: true},
		DeletedAt: pgtype.Timestamptz{Valid: false},
		Name:      pgtype.Text{String: feed.ITunesExt.Owner.Name, Valid: feed.ITunesExt.Owner.Name != ""},
		Email:     pgtype.Text{String: feed.ITunesExt.Owner.Email, Valid: feed.ITunesExt.Owner.Email != ""},
		ItunesID:  itunes.ID,
	})
	if err != nil {
		log.Printf("Error adding iTunes owner to database: %s", err)
		return
	}
	log.Printf("iTunes owner added to database: %s", feed.ITunesExt.Owner.Name)
}

78
main.go
View file

@ -1,78 +0,0 @@
package main
import (
"context"
"log"
"os"
"net/http"
"github.com/TheLovinator1/FeedVault/db"
"github.com/jackc/pgx/v5/pgxpool"
_ "github.com/joho/godotenv/autoload"
)
var (
dbpool *pgxpool.Pool
DB *db.Queries
)
// Connect to our PostgreSQL database and store the connection pool in the DB variable that we can use throughout our application.
// init connects to PostgreSQL and initializes the package-level
// connection pool (dbpool) and query handle (DB) used throughout the
// application.
//
// Bug fix: the original used `dbpool, err := pgxpool.New(...)`, which
// declared a new function-local dbpool that shadowed the package-level
// variable — main's `defer dbpool.Close()` then operated on a nil pool.
// Assigning with `=` stores the pool in the package-level variable.
func init() {
	ctx := context.Background()

	// Open a database connection, falling back to a local default DSN.
	databaseURL := os.Getenv("DATABASE_URL")
	if databaseURL == "" {
		databaseURL = "postgresql://localhost/feedvault?user=feedvault&password=feedvault"
	}
	log.Printf("Connecting to database: %s", databaseURL)

	var err error
	dbpool, err = pgxpool.New(ctx, databaseURL)
	if err != nil {
		log.Fatalf("pgxpool.New(): %v", err)
	}

	// Create a new DB object backed by the pool.
	DB = db.New(dbpool)

	// Test the connection.
	if err := dbpool.Ping(ctx); err != nil {
		log.Fatalf("dbpool.Ping(): %v", err)
	}
}
// main wires up the HTTP routes and serves the application until the
// listener stops or fails.
func main() {
	defer dbpool.Close()
	log.Print("Starting server")

	// Register all routes on a dedicated mux.
	mux := http.NewServeMux()
	mux.HandleFunc("/", IndexHandler)
	mux.HandleFunc("/api", ApiHandler)
	mux.HandleFunc("/feeds", FeedsHandler)
	mux.HandleFunc("/feed/", FeedHandler)
	mux.HandleFunc("/add", AddFeedHandler)
	mux.HandleFunc("/upload_opml", UploadOpmlHandler)

	// Resolve the listen address from the environment, with defaults.
	port := os.Getenv("PORT")
	if port == "" {
		port = "8000"
	}
	host := os.Getenv("HOST")
	if host == "" {
		host = "127.0.0.1"
	}
	addr := host + ":" + port

	srv := &http.Server{
		Addr:    addr,
		Handler: mux,
	}
	log.Print("Server started on http://" + addr + " <Ctrl-C> to stop")
	if err := srv.ListenAndServe(); err != nil && err != http.ErrServerClosed {
		log.Fatalf("ListenAndServe(): %v", err)
	}
}

20
manage.py Normal file
View file

@ -0,0 +1,20 @@
#!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main() -> None:
    """Run Django's administrative command-line tasks."""
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "feedvault.settings")
    try:
        # Imported lazily so a missing Django install produces the friendly
        # error below instead of failing at module import time.
        from django.core.management import execute_from_command_line  # noqa: PLC0415
    except ImportError as exc:
        msg = "Couldn't import Django. Have you run `poetry install` or `poetry shell`?"
        raise ImportError(msg) from exc
    execute_from_command_line(sys.argv)


if __name__ == "__main__":
    main()

94
opml.go
View file

@ -1,94 +0,0 @@
package main
import "encoding/xml"
// opml is the minimal OPML document structure this app cares about:
// a head with a title and a body of outline entries.
type opml struct {
	Head struct {
		Title string `xml:"title"`
	} `xml:"head"`
	Body Body `xml:"body"`
}

// Body holds the top-level outline elements of an OPML document.
type Body struct {
	Outlines []Outline `xml:"outline"`
}

// Outline is a single OPML outline entry. Outlines may nest; XmlUrl is
// the feed URL and HtmlUrl the website URL (either may be empty).
type Outline struct {
	Outlines []Outline `xml:"outline"`
	XmlUrl   string    `xml:"xmlUrl,attr,omitempty"`
	HtmlUrl  string    `xml:"htmlUrl,attr,omitempty"`
}
// ParseString unmarshals the OPML XML document in s into o.
func (o *opml) ParseString(s string) error {
	return xml.Unmarshal([]byte(s), o)
}
// String marshals o back to XML, prefixed with the standard XML header.
func (o *opml) String() (string, error) {
	b, err := xml.Marshal(o)
	if err != nil {
		return "", err
	}
	return xml.Header + string(b), nil
}
// linksFromOpml is the flattened result of parsing an OPML file:
// the feed (xmlUrl) and website (htmlUrl) links it contains.
type linksFromOpml struct {
	XMLLinks  []string `json:"xmlLinks"`
	HTMLLinks []string `json:"htmlLinks"`
}
// RemoveDuplicates removes duplicate strings from s in place, keeping
// the first occurrence of each value and preserving order. The returned
// slice shares s's backing array.
func RemoveDuplicates(s []string) []string {
	seen := make(map[string]struct{}, len(s))
	out := s[:0]
	for _, v := range s {
		if _, dup := seen[v]; dup {
			continue
		}
		seen[v] = struct{}{}
		out = append(out, v)
	}
	return out
}
// ParseOpml extracts all feed (xmlUrl) and website (htmlUrl) links from
// the OPML document in s, de-duplicated and in first-seen order.
//
// Improvements over the original: the outline tree is walked to any
// depth (the original only looked two levels deep), and empty URLs are
// skipped up front instead of being appended and spliced out afterwards.
// For documents nested at most two levels the output is identical.
func ParseOpml(s string) (linksFromOpml, error) {
	doc := &opml{}
	if err := doc.ParseString(s); err != nil {
		return linksFromOpml{}, err
	}

	links := linksFromOpml{}

	// Breadth-first walk over the outline tree. For two-level documents
	// this visits nodes in the same order as the original implementation
	// (all top-level outlines first, then their children in order).
	queue := doc.Body.Outlines
	for len(queue) > 0 {
		outline := queue[0]
		queue = queue[1:]
		if outline.XmlUrl != "" {
			links.XMLLinks = append(links.XMLLinks, outline.XmlUrl)
		}
		if outline.HtmlUrl != "" {
			links.HTMLLinks = append(links.HTMLLinks, outline.HtmlUrl)
		}
		queue = append(queue, outline.Outlines...)
	}

	// Remove any duplicates.
	links.XMLLinks = RemoveDuplicates(links.XMLLinks)
	links.HTMLLinks = RemoveDuplicates(links.HTMLLinks)
	return links, nil
}

View file

@ -1,109 +0,0 @@
package main
import (
"testing"
)
// opmlExample is a flat OPML document with two top-level feed outlines.
var opmlExample = `<?xml version="1.0" encoding="utf-8"?>
<opml version="1.0">
<head>
<title>My Feeds</title>
</head>
<body>
<outline text="24 ways" htmlUrl="http://24ways.org/" type="rss" xmlUrl="http://feeds.feedburner.com/24ways"/>
<outline text="Writing — by Jan" htmlUrl="http://writing.jan.io/" type="rss" xmlUrl="http://writing.jan.io/feed.xml"/>
</body>
</opml>
`
// secondOpmlExample is an OPML document whose feed outlines are nested
// one level inside a grouping outline.
var secondOpmlExample = `<?xml version="1.0" encoding="UTF-8"?>
<opml version="1.0">
<head>
<title>Engineering Blogs</title>
</head>
<body>
<outline text="Engineering Blogs" title="Engineering Blogs">
<outline type="rss" text="8th Light" title="8th Light" xmlUrl="https://8thlight.com/blog/feed/atom.xml" htmlUrl="https://8thlight.com/blog/"/>
<outline type="rss" text="A" title="A" xmlUrl="http://www.vertabelo.com/_rss/blog.xml" htmlUrl="http://www.vertabelo.com/blog"/>
</outline>
</body>
</opml>
`
// TestParseOpml exercises the OPML parser on a flat (non-nested) document.
func TestParseOpml(t *testing.T) {
	got, err := ParseOpml(opmlExample)
	if err != nil {
		t.Error(err)
	}

	// Both link lists must contain exactly the two example entries.
	if len(got.XMLLinks) != 2 {
		t.Errorf("Expected 2 links, got %d", len(got.XMLLinks))
	}
	if len(got.HTMLLinks) != 2 {
		t.Errorf("Expected 2 links, got %d", len(got.HTMLLinks))
	}

	// De-duplicating again must not drop unique links.
	got.XMLLinks = RemoveDuplicates(got.XMLLinks)
	got.HTMLLinks = RemoveDuplicates(got.HTMLLinks)
	if len(got.XMLLinks) != 2 {
		t.Errorf("Expected 2 links, got %d", len(got.XMLLinks))
	}
	if len(got.HTMLLinks) != 2 {
		t.Errorf("Expected 2 links, got %d", len(got.HTMLLinks))
	}

	// Spot-check the extracted URLs and their order.
	if got.XMLLinks[0] != "http://feeds.feedburner.com/24ways" {
		t.Errorf("Expected http://feeds.feedburner.com/24ways, got %s", got.XMLLinks[0])
	}
	if got.XMLLinks[1] != "http://writing.jan.io/feed.xml" {
		t.Errorf("Expected http://writing.jan.io/feed.xml, got %s", got.XMLLinks[1])
	}
	if got.HTMLLinks[0] != "http://24ways.org/" {
		t.Errorf("Expected http://24ways.org/, got %s", got.HTMLLinks[0])
	}
	if got.HTMLLinks[1] != "http://writing.jan.io/" {
		t.Errorf("Expected http://writing.jan.io/, got %s", got.HTMLLinks[1])
	}
}
// TestParseOpmlNested exercises the OPML parser on a document whose feed
// outlines are nested inside a grouping outline.
func TestParseOpmlNested(t *testing.T) {
	got, err := ParseOpml(secondOpmlExample)
	if err != nil {
		t.Error(err)
	}

	// Both link lists must contain exactly the two nested entries.
	if len(got.XMLLinks) != 2 {
		t.Errorf("Expected 2 links, got %d", len(got.XMLLinks))
	}
	if len(got.HTMLLinks) != 2 {
		t.Errorf("Expected 2 links, got %d", len(got.HTMLLinks))
	}

	// De-duplicating again must not drop unique links.
	got.XMLLinks = RemoveDuplicates(got.XMLLinks)
	got.HTMLLinks = RemoveDuplicates(got.HTMLLinks)
	if len(got.XMLLinks) != 2 {
		t.Errorf("Expected 2 links, got %d", len(got.XMLLinks))
	}
	if len(got.HTMLLinks) != 2 {
		t.Errorf("Expected 2 links, got %d", len(got.HTMLLinks))
	}

	// Spot-check the extracted URLs and their order.
	if got.XMLLinks[0] != "https://8thlight.com/blog/feed/atom.xml" {
		t.Errorf("Expected https://8thlight.com/blog/feed/atom.xml, got %s", got.XMLLinks[0])
	}
	if got.XMLLinks[1] != "http://www.vertabelo.com/_rss/blog.xml" {
		t.Errorf("Expected http://www.vertabelo.com/_rss/blog.xml, got %s", got.XMLLinks[1])
	}
	if got.HTMLLinks[0] != "https://8thlight.com/blog/" {
		t.Errorf("Expected https://8thlight.com/blog/, got %s", got.HTMLLinks[0])
	}
	if got.HTMLLinks[1] != "http://www.vertabelo.com/blog" {
		t.Errorf("Expected http://www.vertabelo.com/blog, got %s", got.HTMLLinks[1])
	}
}

131
poetry.lock generated Normal file
View file

@ -0,0 +1,131 @@
# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand.
[[package]]
name = "asgiref"
version = "3.7.2"
description = "ASGI specs, helper code, and adapters"
optional = false
python-versions = ">=3.7"
files = [
{file = "asgiref-3.7.2-py3-none-any.whl", hash = "sha256:89b2ef2247e3b562a16eef663bc0e2e703ec6468e2fa8a5cd61cd449786d4f6e"},
{file = "asgiref-3.7.2.tar.gz", hash = "sha256:9e0ce3aa93a819ba5b45120216b23878cf6e8525eb3848653452b4192b92afed"},
]
[package.extras]
tests = ["mypy (>=0.800)", "pytest", "pytest-asyncio"]
[[package]]
name = "django"
version = "5.0.2"
description = "A high-level Python web framework that encourages rapid development and clean, pragmatic design."
optional = false
python-versions = ">=3.10"
files = [
{file = "Django-5.0.2-py3-none-any.whl", hash = "sha256:56ab63a105e8bb06ee67381d7b65fe6774f057e41a8bab06c8020c8882d8ecd4"},
{file = "Django-5.0.2.tar.gz", hash = "sha256:b5bb1d11b2518a5f91372a282f24662f58f66749666b0a286ab057029f728080"},
]
[package.dependencies]
asgiref = ">=3.7.0,<4"
sqlparse = ">=0.3.1"
tzdata = {version = "*", markers = "sys_platform == \"win32\""}
[package.extras]
argon2 = ["argon2-cffi (>=19.1.0)"]
bcrypt = ["bcrypt"]
[[package]]
name = "feedparser"
version = "6.0.11"
description = "Universal feed parser, handles RSS 0.9x, RSS 1.0, RSS 2.0, CDF, Atom 0.3, and Atom 1.0 feeds"
optional = false
python-versions = ">=3.6"
files = [
{file = "feedparser-6.0.11-py3-none-any.whl", hash = "sha256:0be7ee7b395572b19ebeb1d6aafb0028dee11169f1c934e0ed67d54992f4ad45"},
{file = "feedparser-6.0.11.tar.gz", hash = "sha256:c9d0407b64c6f2a065d0ebb292c2b35c01050cc0dc33757461aaabdc4c4184d5"},
]
[package.dependencies]
sgmllib3k = "*"
[[package]]
name = "python-dotenv"
version = "1.0.1"
description = "Read key-value pairs from a .env file and set them as environment variables"
optional = false
python-versions = ">=3.8"
files = [
{file = "python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca"},
{file = "python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a"},
]
[package.extras]
cli = ["click (>=5.0)"]
[[package]]
name = "ruff"
version = "0.2.2"
description = "An extremely fast Python linter and code formatter, written in Rust."
optional = false
python-versions = ">=3.7"
files = [
{file = "ruff-0.2.2-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:0a9efb032855ffb3c21f6405751d5e147b0c6b631e3ca3f6b20f917572b97eb6"},
{file = "ruff-0.2.2-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:d450b7fbff85913f866a5384d8912710936e2b96da74541c82c1b458472ddb39"},
{file = "ruff-0.2.2-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ecd46e3106850a5c26aee114e562c329f9a1fbe9e4821b008c4404f64ff9ce73"},
{file = "ruff-0.2.2-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5e22676a5b875bd72acd3d11d5fa9075d3a5f53b877fe7b4793e4673499318ba"},
{file = "ruff-0.2.2-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1695700d1e25a99d28f7a1636d85bafcc5030bba9d0578c0781ba1790dbcf51c"},
{file = "ruff-0.2.2-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:b0c232af3d0bd8f521806223723456ffebf8e323bd1e4e82b0befb20ba18388e"},
{file = "ruff-0.2.2-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f63d96494eeec2fc70d909393bcd76c69f35334cdbd9e20d089fb3f0640216ca"},
{file = "ruff-0.2.2-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6a61ea0ff048e06de273b2e45bd72629f470f5da8f71daf09fe481278b175001"},
{file = "ruff-0.2.2-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5e1439c8f407e4f356470e54cdecdca1bd5439a0673792dbe34a2b0a551a2fe3"},
{file = "ruff-0.2.2-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:940de32dc8853eba0f67f7198b3e79bc6ba95c2edbfdfac2144c8235114d6726"},
{file = "ruff-0.2.2-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:0c126da55c38dd917621552ab430213bdb3273bb10ddb67bc4b761989210eb6e"},
{file = "ruff-0.2.2-py3-none-musllinux_1_2_i686.whl", hash = "sha256:3b65494f7e4bed2e74110dac1f0d17dc8e1f42faaa784e7c58a98e335ec83d7e"},
{file = "ruff-0.2.2-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:1ec49be4fe6ddac0503833f3ed8930528e26d1e60ad35c2446da372d16651ce9"},
{file = "ruff-0.2.2-py3-none-win32.whl", hash = "sha256:d920499b576f6c68295bc04e7b17b6544d9d05f196bb3aac4358792ef6f34325"},
{file = "ruff-0.2.2-py3-none-win_amd64.whl", hash = "sha256:cc9a91ae137d687f43a44c900e5d95e9617cb37d4c989e462980ba27039d239d"},
{file = "ruff-0.2.2-py3-none-win_arm64.whl", hash = "sha256:c9d15fc41e6054bfc7200478720570078f0b41c9ae4f010bcc16bd6f4d1aacdd"},
{file = "ruff-0.2.2.tar.gz", hash = "sha256:e62ed7f36b3068a30ba39193a14274cd706bc486fad521276458022f7bccb31d"},
]
[[package]]
name = "sgmllib3k"
version = "1.0.0"
description = "Py3k port of sgmllib."
optional = false
python-versions = "*"
files = [
{file = "sgmllib3k-1.0.0.tar.gz", hash = "sha256:7868fb1c8bfa764c1ac563d3cf369c381d1325d36124933a726f29fcdaa812e9"},
]
[[package]]
name = "sqlparse"
version = "0.4.4"
description = "A non-validating SQL parser."
optional = false
python-versions = ">=3.5"
files = [
{file = "sqlparse-0.4.4-py3-none-any.whl", hash = "sha256:5430a4fe2ac7d0f93e66f1efc6e1338a41884b7ddf2a350cedd20ccc4d9d28f3"},
{file = "sqlparse-0.4.4.tar.gz", hash = "sha256:d446183e84b8349fa3061f0fe7f06ca94ba65b426946ffebe6e3e8295332420c"},
]
[package.extras]
dev = ["build", "flake8"]
doc = ["sphinx"]
test = ["pytest", "pytest-cov"]
[[package]]
name = "tzdata"
version = "2024.1"
description = "Provider of IANA time zone data"
optional = false
python-versions = ">=2"
files = [
{file = "tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"},
{file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"},
]
[metadata]
lock-version = "2.0"
python-versions = "^3.12"
content-hash = "2009fc28de585811e28f244d45005b50835cb752c757761f61215577699b4cef"

49
pyproject.toml Normal file
View file

@ -0,0 +1,49 @@
[tool.poetry]
name = "feedvault"
version = "0.1.0"
description = "FeedVault is a Django app for archiving RSS feeds."
authors = ["Joakim Hellsén <tlovinator@gmail.com>"]
readme = "README.md"
[tool.poetry.dependencies]
python = "^3.12"
django = "^5.0.2"
python-dotenv = "^1.0.1"
feedparser = "^6.0.11"
[tool.poetry.group.dev.dependencies]
ruff = "^0.2.2"
[build-system]
requires = ["poetry-core"]
build-backend = "poetry.core.masonry.api"
[tool.ruff]
exclude = ["migrations"]
fix = true
unsafe-fixes = true
preview = true
lint.select = ["ALL"]
lint.ignore = [
"PLR6301", # Checks for the presence of unused self parameter in methods definitions
"CPY001", # Missing copyright notice at top of file
"ERA001", # Found commented-out code
"FIX002", # Line contains TODO
"D104", # Missing docstring in public package # TODO(TheLovinator): Fix this
"D100", # Missing docstring in public module # TODO(TheLovinator): Fix this
# https://github.com/TheLovinator1/panso.se/issues/25
]
line-length = 120
[tool.ruff.lint.pydocstyle]
convention = "google"
[tool.ruff.lint.per-file-ignores]
"**/tests.py" = [
"S101", # Allow asserts
"ARG", # Allow unused arguments
"FBT", # Don't care about booleans as positional arguments in tests, e.g. via @pytest.mark.parametrize()
"PLR2004", # Allow "assert response.status_code == 200" when testing views
"D102", # Allow missing docstrings in tests
"PLR6301", # Checks for the presence of unused self parameter in methods definitions.
]

View file

@ -1,56 +0,0 @@
package main
// "Fun" messages that will be displayed in the footer
var FunMsg = []string{
"Web scraping is not a crime.",
"Made in Sweden.",
"🙃",
"Hello.",
"<3",
"So many feeds, so little time.",
"A feed in the hand is worth two in the bush.",
"Death begets death begets death.",
"I am Eo's dream.",
"Freedom in an unjust system is no freedom at all.",
"Omnis vir lupus.",
"Shit escalates.",
"Break the chains, my love.",
"Sharpened by hate. Strengthened by love.",
"Hic sunt leones.",
"Keyboard not found. Press F1 to continue.",
"The stars shine brighter when shared among comrades.",
"Zzz... 🛌",
"Generated in 0 ms.",
"Open source, open heart.",
"RSS is the new black.",
"Unsubscribe.",
"ChatGPT made 99% of this website :-)",
"👁️👄👁️",
"From each planet, to each star—equality in the cosmos.",
"In the vastness of space, no one should own more than they can share.",
"Workers of the universe, unite! The stars are our common heritage.",
"Space is for all, not just the privileged few.",
"From the red planet to the black hole, solidarity knows no borders.",
"Astronauts of the world, unite for a cosmic revolution!",
"Space is for everyone, not just the 1%.",
"No class struggle in zero gravity.",
"Only solidarity among the cosmic proletariat.",
"The red glow of the stars reflects the spirit of collective effort.",
"The final frontier is a shared frontier, where no one is left behind.",
"Vote for a space utopia!",
"From the Milky Way to Andromeda, the stars belong to the people.",
"Space is for the workers, not the bosses.",
"Let the fruits of progress be the common heritage of all.",
"From the moon to the asteroid belt, the cosmos is for the common good.",
"The stars do not discriminate; neither should we.",
"In the vacuum of space, let equality fill the void.",
"From Big Bang to the heat death of the universe, solidarity is eternal.",
"In dark times, should the stars also go out?",
"Un Jour Je Serai de Retour Prés de Toi",
"You should build Space Communism — precisely *because* it's impossible.",
"She thinks you are an idiot, sire.",
"The song of death is sweet and endless.",
"Child-murdering billionaires still rule the world with a shit-eating grin.",
"Eight billion people - and you failed every single one of them.",
"You are the first crack. From you it will spread.",
}

View file

@ -1,605 +0,0 @@
-- name: CreateFeed :one
INSERT INTO
feeds (
"url",
created_at,
updated_at,
deleted_at,
title,
"description",
link,
feed_link,
links,
updated,
updated_parsed,
published,
published_parsed,
"language",
copyright,
generator,
categories,
custom,
feed_type,
feed_version
)
VALUES
(
$1,
$2,
$3,
$4,
$5,
$6,
$7,
$8,
$9,
$10,
$11,
$12,
$13,
$14,
$15,
$16,
$17,
$18,
$19,
$20
)
RETURNING
*;
-- name: CountFeeds :one
SELECT
COUNT(*)
FROM
feeds;
-- name: CreateItem :one
INSERT INTO
items (
created_at,
updated_at,
deleted_at,
title,
"description",
content,
link,
links,
updated,
updated_parsed,
published,
published_parsed,
"guid",
categories,
custom,
feed_id
)
VALUES
(
$1,
$2,
$3,
$4,
$5,
$6,
$7,
$8,
$9,
$10,
$11,
$12,
$13,
$14,
$15,
$16
)
RETURNING
*;
-- name: CountItems :one
SELECT
COUNT(*)
FROM
items;
-- name: GetFeed :one
SELECT
*
FROM
feeds
WHERE
id = $1;
-- name: GetFeeds :many
SELECT
*
FROM
feeds
ORDER BY
created_at DESC
LIMIT
$1
OFFSET
$2;
-- name: GetItem :one
SELECT
*
FROM
items
WHERE
id = $1;
-- name: GetItems :many
SELECT
*
FROM
items
WHERE
feed_id = $1
ORDER BY
created_at DESC
LIMIT
$2
OFFSET
$3;
-- name: CreateFeedExtension :one
INSERT INTO
feed_extensions (
created_at,
updated_at,
deleted_at,
"name",
"value",
attrs,
children,
feed_id
)
VALUES
($1, $2, $3, $4, $5, $6, $7, $8)
RETURNING
*;
-- name: CreateItemExtension :one
INSERT INTO
item_extensions (
created_at,
updated_at,
deleted_at,
"name",
"value",
attrs,
children,
item_id
)
VALUES
($1, $2, $3, $4, $5, $6, $7, $8)
RETURNING
*;
-- name: GetFeedExtensions :many
SELECT
*
FROM
feed_extensions
WHERE
feed_id = $1
ORDER BY
created_at DESC
LIMIT
$2
OFFSET
$3;
-- name: GetItemExtensions :many
SELECT
*
FROM
item_extensions
WHERE
item_id = $1
ORDER BY
created_at DESC
LIMIT
$2
OFFSET
$3;
-- name: CreateFeedAuthor :one
INSERT INTO
feed_authors (
created_at,
updated_at,
deleted_at,
"name",
email,
feed_id
)
VALUES
($1, $2, $3, $4, $5, $6)
RETURNING
*;
-- name: CreateItemAuthor :one
INSERT INTO
item_authors (
created_at,
updated_at,
deleted_at,
"name",
email,
item_id
)
VALUES
($1, $2, $3, $4, $5, $6)
RETURNING
*;
-- name: GetFeedAuthors :many
SELECT
*
FROM
feed_authors
WHERE
feed_id = $1
ORDER BY
created_at DESC
LIMIT
$2
OFFSET
$3;
-- name: GetItemAuthors :many
SELECT
*
FROM
item_authors
WHERE
item_id = $1
ORDER BY
created_at DESC
LIMIT
$2
OFFSET
$3;
-- name: CreateFeedImage :one
INSERT INTO
feed_images (
created_at,
updated_at,
deleted_at,
"url",
title,
feed_id
)
VALUES
($1, $2, $3, $4, $5, $6)
RETURNING
*;
-- name: CreateItemImage :one
INSERT INTO
item_images (
created_at,
updated_at,
deleted_at,
"url",
title,
item_id
)
VALUES
($1, $2, $3, $4, $5, $6)
RETURNING
*;
-- name: GetFeedImages :many
SELECT
*
FROM
feed_images
WHERE
feed_id = $1
ORDER BY
created_at DESC
LIMIT
$2
OFFSET
$3;
-- name: GetItemImages :many
SELECT
*
FROM
item_images
WHERE
item_id = $1
ORDER BY
created_at DESC
LIMIT
$2
OFFSET
$3;
-- name: CreateFeedDublinCore :one
INSERT INTO
feed_dublin_cores (
created_at,
updated_at,
deleted_at,
title,
creator,
author,
"subject",
"description",
publisher,
contributor,
"date",
"type",
format,
identifier,
source,
"language",
relation,
coverage,
rights,
feed_id
)
VALUES
(
$1,
$2,
$3,
$4,
$5,
$6,
$7,
$8,
$9,
$10,
$11,
$12,
$13,
$14,
$15,
$16,
$17,
$18,
$19,
$20
)
RETURNING
*;
-- name: CreateItemDublinCore :one
INSERT INTO
item_dublin_cores (
created_at,
updated_at,
deleted_at,
title,
creator,
author,
"subject",
"description",
publisher,
contributor,
"date",
"type",
format,
identifier,
source,
"language",
relation,
coverage,
rights,
item_id
)
VALUES
(
$1,
$2,
$3,
$4,
$5,
$6,
$7,
$8,
$9,
$10,
$11,
$12,
$13,
$14,
$15,
$16,
$17,
$18,
$19,
$20
)
RETURNING
*;
-- name: GetFeedDublinCores :many
SELECT
*
FROM
feed_dublin_cores
WHERE
feed_id = $1
ORDER BY
created_at DESC
LIMIT
$2
OFFSET
$3;
-- name: GetItemDublinCores :many
SELECT
*
FROM
item_dublin_cores
WHERE
item_id = $1
ORDER BY
created_at DESC
LIMIT
$2
OFFSET
$3;
-- name: CreateFeedItunes :one
INSERT INTO
feed_itunes (
created_at,
updated_at,
deleted_at,
author,
"block",
"explicit",
keywords,
subtitle,
summary,
"image",
complete,
new_feed_url,
"type",
feed_id
)
VALUES
(
$1,
$2,
$3,
$4,
$5,
$6,
$7,
$8,
$9,
$10,
$11,
$12,
$13,
$14
)
RETURNING
*;
-- name: CreateItemItunes :one
INSERT INTO
item_itunes (
created_at,
updated_at,
deleted_at,
author,
"block",
"explicit",
keywords,
subtitle,
summary,
"image",
is_closed_captioned,
episode,
season,
"order",
episode_type,
item_id
)
VALUES
(
$1,
$2,
$3,
$4,
$5,
$6,
$7,
$8,
$9,
$10,
$11,
$12,
$13,
$14,
$15,
$16
)
RETURNING
*;
-- name: GetFeedItunes :one
SELECT
*
FROM
feed_itunes
WHERE
feed_id = $1;
-- name: GetItemItunes :one
SELECT
*
FROM
item_itunes
WHERE
item_id = $1;
-- name: CreateFeedItunesCategory :one
INSERT INTO
feed_itunes_categories (
created_at,
updated_at,
deleted_at,
"text",
subcategory,
itunes_id
)
VALUES
($1, $2, $3, $4, $5, $6)
RETURNING
*;
-- name: CreateFeedItunesOwner :one
INSERT INTO
feed_itunes_owners (
created_at,
updated_at,
deleted_at,
email,
"name",
itunes_id
)
VALUES
($1, $2, $3, $4, $5, $6)
RETURNING
*;
-- name: GetFeedItunesCategories :many
SELECT
*
FROM
feed_itunes_categories
WHERE
itunes_id = $1
ORDER BY
created_at DESC
LIMIT
$2
OFFSET
$3;
-- name: GetFeedItunesOwners :many
SELECT
*
FROM
feed_itunes_owners
WHERE
itunes_id = $1
ORDER BY
created_at DESC
LIMIT
$2
OFFSET
$3;

View file

@ -1,2 +0,0 @@
-- name: DBSize :one
-- Human-readable on-disk size of the current database, formatted by
-- pg_size_pretty (e.g. "123 MB"). Used for the cached size shown in the UI.
SELECT pg_size_pretty(pg_database_size(current_database()));

View file

@ -1,73 +0,0 @@
-- +goose Up
-- +goose StatementBegin
-- Create table feeds if not exists
CREATE TABLE IF NOT EXISTS feeds (
id BIGINT GENERATED ALWAYS AS IDENTITY PRIMARY KEY,
"url" TEXT NOT NULL,
created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
deleted_at TIMESTAMPTZ DEFAULT NULL,
-- From gofeed: https://github.com/mmcdole/gofeed/blob/master/feed.go
title TEXT,
"description" TEXT,
link TEXT,
feed_link TEXT,
links TEXT [],
updated TEXT,
updated_parsed TIMESTAMPTZ,
published TEXT,
published_parsed TIMESTAMPTZ,
-- Authors - See feed_authors
"language" TEXT,
-- Image - See feed_images
copyright TEXT,
generator TEXT,
categories TEXT [],
-- Dublin Core - See feed_dublin_cores
-- Itunes - See feed_itunes
-- Extensions - See feed_extensions
custom JSONB,
-- Items - See items
feed_type TEXT,
feed_version TEXT
);
-- Feed item
-- https://github.com/mmcdole/gofeed/blob/master/feed.go#L49
CREATE TABLE IF NOT EXISTS items (
id BIGINT GENERATED ALWAYS AS IDENTITY PRIMARY KEY,
created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
deleted_at TIMESTAMPTZ DEFAULT NULL,
-- From gofeed:
title TEXT,
"description" TEXT,
content TEXT,
link TEXT,
links TEXT [],
updated TEXT,
updated_parsed TIMESTAMPTZ,
published TEXT,
published_parsed TIMESTAMPTZ,
-- Authors - See item_authors
"guid" TEXT,
-- Image - See item_images
categories TEXT [],
-- Enclosures - See enclosures
-- Dublin Core - See item_dublin_cores
-- Itunes - See item_itunes
-- Extensions - See item_extensions
custom JSONB,
-- Link to feed
feed_id BIGINT NOT NULL,
CONSTRAINT fk_feed_id FOREIGN KEY (feed_id) REFERENCES feeds (id) ON DELETE CASCADE
);
-- +goose StatementEnd
-- +goose Down
-- +goose StatementBegin
DROP TABLE IF EXISTS feeds CASCADE;
DROP TABLE IF EXISTS items CASCADE;
-- +goose StatementEnd

View file

@ -1,44 +0,0 @@
-- +goose Up
-- +goose StatementBegin
-- Extensions for feeds
-- https://github.com/mmcdole/gofeed/blob/master/extensions/extensions.go#L3
CREATE TABLE IF NOT EXISTS feed_extensions (
id BIGINT GENERATED ALWAYS AS IDENTITY PRIMARY KEY,
created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
deleted_at TIMESTAMPTZ DEFAULT NULL,
-- From gofeed:
"name" TEXT,
"value" TEXT,
attrs JSONB,
children JSONB,
-- Link to feed
feed_id BIGINT NOT NULL,
CONSTRAINT fk_feed_id FOREIGN KEY (feed_id) REFERENCES feeds (id) ON DELETE CASCADE
);
-- Extensions for items
-- https://github.com/mmcdole/gofeed/blob/master/extensions/extensions.go#L3
CREATE TABLE IF NOT EXISTS item_extensions (
id BIGINT GENERATED ALWAYS AS IDENTITY PRIMARY KEY,
created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
deleted_at TIMESTAMPTZ DEFAULT NULL,
-- From gofeed:
"name" TEXT,
"value" TEXT,
attrs JSONB,
children JSONB,
-- Link to feed item (Also called feed entry)
item_id BIGINT NOT NULL,
CONSTRAINT fk_item_id FOREIGN KEY (item_id) REFERENCES items (id) ON DELETE CASCADE
);
-- +goose StatementEnd
-- +goose Down
-- +goose StatementBegin
DROP TABLE IF EXISTS feed_extensions CASCADE;
DROP TABLE IF EXISTS item_extensions CASCADE;
-- +goose StatementEnd

View file

@ -1,40 +0,0 @@
-- +goose Up
-- +goose StatementBegin
-- Person for feeds
-- https://github.com/mmcdole/gofeed/blob/master/feed.go#L73
CREATE TABLE IF NOT EXISTS feed_authors (
id BIGINT GENERATED ALWAYS AS IDENTITY PRIMARY KEY,
created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
deleted_at TIMESTAMPTZ DEFAULT NULL,
-- From gofeed:
"name" TEXT,
email TEXT,
-- Link to feed
feed_id BIGINT NOT NULL,
CONSTRAINT fk_feed_id FOREIGN KEY (feed_id) REFERENCES feeds (id) ON DELETE CASCADE
);
-- Person for items
-- https://github.com/mmcdole/gofeed/blob/master/feed.go#L73
CREATE TABLE IF NOT EXISTS item_authors (
id BIGINT GENERATED ALWAYS AS IDENTITY PRIMARY KEY,
created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
deleted_at TIMESTAMPTZ DEFAULT NULL,
-- From gofeed:
"name" TEXT,
email TEXT,
-- Link to feed item (Also called feed entry)
item_id BIGINT NOT NULL,
CONSTRAINT fk_item_id FOREIGN KEY (item_id) REFERENCES items (id) ON DELETE CASCADE
);
-- +goose StatementEnd
-- +goose Down
-- +goose StatementBegin
DROP TABLE IF EXISTS feed_authors CASCADE;
DROP TABLE IF EXISTS item_authors CASCADE;
-- +goose StatementEnd

View file

@ -1,40 +0,0 @@
-- +goose Up
-- +goose StatementBegin
-- Image for feeds
-- https://github.com/mmcdole/gofeed/blob/master/feed.go#L80
CREATE TABLE IF NOT EXISTS feed_images (
id BIGINT GENERATED ALWAYS AS IDENTITY PRIMARY KEY,
created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
deleted_at TIMESTAMPTZ DEFAULT NULL,
-- From gofeed:
"url" TEXT,
title TEXT,
-- Link to feed
feed_id BIGINT NOT NULL,
CONSTRAINT fk_feed_id FOREIGN KEY (feed_id) REFERENCES feeds (id) ON DELETE CASCADE
);
-- Image for items
-- https://github.com/mmcdole/gofeed/blob/master/feed.go#L80
CREATE TABLE IF NOT EXISTS item_images (
id BIGINT GENERATED ALWAYS AS IDENTITY PRIMARY KEY,
created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
deleted_at TIMESTAMPTZ DEFAULT NULL,
-- From gofeed:
"url" TEXT,
title TEXT,
-- Link to feed item (Also called feed entry)
item_id BIGINT NOT NULL,
CONSTRAINT fk_item_id FOREIGN KEY (item_id) REFERENCES items (id) ON DELETE CASCADE
);
-- +goose StatementEnd
-- +goose Down
-- +goose StatementBegin
DROP TABLE IF EXISTS feed_images CASCADE;
DROP TABLE IF EXISTS item_images CASCADE;
-- +goose StatementEnd

View file

@ -1,68 +0,0 @@
-- +goose Up
-- +goose StatementBegin
-- Dublin Core for feeds
-- https://github.com/mmcdole/gofeed/blob/master/extensions/dublincore.go#L5
CREATE TABLE IF NOT EXISTS feed_dublin_cores (
id BIGINT GENERATED ALWAYS AS IDENTITY PRIMARY KEY,
created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
deleted_at TIMESTAMPTZ DEFAULT NULL,
-- From gofeed:
title TEXT [],
creator TEXT [],
author TEXT [],
"subject" TEXT [],
"description" TEXT [],
publisher TEXT [],
contributor TEXT [],
"date" TEXT [],
"type" TEXT [],
format TEXT [],
identifier TEXT [],
source TEXT [],
"language" TEXT [],
relation TEXT [],
coverage TEXT [],
rights TEXT [],
-- Link to feed
feed_id BIGINT NOT NULL,
CONSTRAINT fk_feed_id FOREIGN KEY (feed_id) REFERENCES feeds (id) ON DELETE CASCADE
);
-- Dublin Core for items
-- https://github.com/mmcdole/gofeed/blob/master/extensions/dublincore.go#L5
CREATE TABLE IF NOT EXISTS item_dublin_cores (
id BIGINT GENERATED ALWAYS AS IDENTITY PRIMARY KEY,
created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
deleted_at TIMESTAMPTZ DEFAULT NULL,
-- From gofeed:
title TEXT [],
creator TEXT [],
author TEXT [],
"subject" TEXT [],
"description" TEXT [],
publisher TEXT [],
contributor TEXT [],
"date" TEXT [],
"type" TEXT [],
format TEXT [],
identifier TEXT [],
source TEXT [],
"language" TEXT [],
relation TEXT [],
coverage TEXT [],
rights TEXT [],
-- Link to feed item (Also called feed entry)
item_id BIGINT NOT NULL,
CONSTRAINT fk_item_id FOREIGN KEY (item_id) REFERENCES items (id) ON DELETE CASCADE
);
-- +goose StatementEnd
-- +goose Down
-- +goose StatementBegin
DROP TABLE IF EXISTS feed_dublin_cores CASCADE;
DROP TABLE IF EXISTS item_dublin_cores CASCADE;
-- +goose StatementEnd

View file

@ -1,96 +0,0 @@
-- +goose Up
-- +goose StatementBegin
-- Itunes for feeds
-- https://github.com/mmcdole/gofeed/blob/master/extensions/itunes.go#L5
CREATE TABLE IF NOT EXISTS feed_itunes (
id BIGINT GENERATED ALWAYS AS IDENTITY PRIMARY KEY,
created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
deleted_at TIMESTAMPTZ DEFAULT NULL,
-- From gofeed:
author TEXT,
"block" TEXT,
-- Categories - See feed_itunes_categories
"explicit" TEXT,
keywords TEXT,
-- Owner - See feed_itunes_owners
subtitle TEXT,
summary TEXT,
"image" TEXT,
complete TEXT,
new_feed_url TEXT,
"type" TEXT,
-- Link to feed
feed_id BIGINT NOT NULL,
CONSTRAINT fk_feed_id FOREIGN KEY (feed_id) REFERENCES feeds (id) ON DELETE CASCADE
);
-- Itunes for items
-- https://github.com/mmcdole/gofeed/blob/master/extensions/itunes.go#L22
CREATE TABLE IF NOT EXISTS item_itunes (
id BIGINT GENERATED ALWAYS AS IDENTITY PRIMARY KEY,
created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
deleted_at TIMESTAMPTZ DEFAULT NULL,
-- From gofeed:
author TEXT,
"block" TEXT,
duration TEXT,
"explicit" TEXT,
keywords TEXT,
subtitle TEXT,
summary TEXT,
"image" TEXT,
is_closed_captioned TEXT,
episode TEXT,
season TEXT,
"order" TEXT,
episode_type TEXT,
-- Link to feed item (Also called feed entry)
item_id BIGINT NOT NULL,
CONSTRAINT fk_item_id FOREIGN KEY (item_id) REFERENCES items (id) ON DELETE CASCADE
);
-- Itunes categories for feeds
-- https://github.com/mmcdole/gofeed/blob/master/extensions/itunes.go#L39
CREATE TABLE IF NOT EXISTS feed_itunes_categories (
id BIGINT GENERATED ALWAYS AS IDENTITY PRIMARY KEY,
created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
deleted_at TIMESTAMPTZ DEFAULT NULL,
-- From gofeed:
"text" TEXT,
subcategory BIGINT,
-- Link to itunes
itunes_id BIGINT NOT NULL,
CONSTRAINT fk_itunes_id FOREIGN KEY (itunes_id) REFERENCES feed_itunes (id) ON DELETE CASCADE,
CONSTRAINT fk_subcategory_id FOREIGN KEY (subcategory) REFERENCES feed_itunes_categories (id) ON DELETE SET NULL
);
-- Itunes owners
-- https://github.com/mmcdole/gofeed/blob/master/extensions/itunes.go#L45
CREATE TABLE IF NOT EXISTS feed_itunes_owners (
id BIGINT GENERATED ALWAYS AS IDENTITY PRIMARY KEY,
created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
deleted_at TIMESTAMPTZ DEFAULT NULL,
-- From gofeed:
email TEXT,
"name" TEXT,
-- Link to itunes
itunes_id BIGINT NOT NULL,
CONSTRAINT fk_itunes_id FOREIGN KEY (itunes_id) REFERENCES feed_itunes (id) ON DELETE CASCADE
);
-- +goose StatementEnd
-- +goose Down
-- +goose StatementBegin
DROP TABLE IF EXISTS feed_itunes CASCADE;
DROP TABLE IF EXISTS item_itunes CASCADE;
DROP TABLE IF EXISTS feed_itunes_categories CASCADE;
DROP TABLE IF EXISTS feed_itunes_owners CASCADE;
-- +goose StatementEnd

View file

@ -1,24 +0,0 @@
-- +goose Up
-- +goose StatementBegin
-- Enclosures - Only for items
-- https://github.com/mmcdole/gofeed/blob/master/feed.go#L86
CREATE TABLE IF NOT EXISTS enclosures (
id BIGINT GENERATED ALWAYS AS IDENTITY PRIMARY KEY,
created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
deleted_at TIMESTAMPTZ DEFAULT NULL,
-- From gofeed:
"url" TEXT,
"length" TEXT,
"type" TEXT,
-- Link to feed item (Also called feed entry)
item_id BIGINT NOT NULL,
CONSTRAINT fk_item_id FOREIGN KEY (item_id) REFERENCES items (id) ON DELETE CASCADE
);
-- +goose StatementEnd
-- +goose Down
-- +goose StatementBegin
DROP TABLE IF EXISTS enclosures CASCADE;
-- +goose StatementEnd

View file

@ -1,12 +0,0 @@
version: "2"
sql:
- engine: "postgresql"
queries: "sql/queries"
schema: "sql/schema"
gen:
go:
out: "db"
sql_package: "pgx/v5"
emit_prepared_queries: true
emit_empty_slices: true
emit_json_tags: true

View file

@ -1,34 +0,0 @@
package main
import (
	"context"
	"sync"
	"time"
)
// Cache pairs a cached value with the time it was fetched.
type Cache struct {
	timestamp time.Time // when data was last refreshed
	data      string    // cached, human-readable database size
}

// cacheMu guards cache: GetDBSize can be called concurrently from HTTP
// handlers, and an unsynchronized read/write of the struct is a data race.
var cacheMu sync.Mutex

var cache Cache

// GetDBSize returns the PostgreSQL database size as a human-readable string,
// caching the result for five minutes so the database is not queried on
// every request. Safe for concurrent use.
func GetDBSize() (string, error) {
	cacheMu.Lock()
	defer cacheMu.Unlock()

	// Refresh when the cached value is older than five minutes; the zero
	// timestamp on first use always triggers a refresh.
	if time.Since(cache.timestamp) > 5*time.Minute {
		dbSize, err := getDBSizeFromDB()
		if err != nil {
			return "", err
		}
		cache = Cache{timestamp: time.Now(), data: dbSize}
	}
	return cache.data, nil
}

// getDBSizeFromDB asks PostgreSQL for the current database size via the
// generated DBSize query (pg_size_pretty).
func getDBSizeFromDB() (string, error) {
	dbSize, err := DB.DBSize(context.Background())
	if err != nil {
		return "", err
	}
	return dbSize, nil
}

View file

@ -1,133 +0,0 @@
package main
import (
	"errors"
	"net"
	"net/http"
	"net/url"
	"strings"
	"time"
)
// localURLs lists hostnames used by routers and other LAN-only devices; a
// feed URL whose domain contains any of these entries is rejected.
// Taken from https://github.com/uBlockOrigin/uAssets/blob/master/filters/lan-block.txt
// https://github.com/gwarser/filter-lists
var localURLs = []string{
	"[::]",
	"[::1]",
	"airbox.home",
	"airport",
	"arcor.easybox",
	"aterm.me",
	"bthomehub.home",
	"bthub.home",
	"congstar.box",
	"connect.box",
	"console.gl-inet.com",
	"easy.box",
	"etxr",
	"fire.walla",
	"fritz.box",
	"fritz.nas",
	"fritz.repeater",
	"giga.cube",
	"hi.link",
	"hitronhub.home",
	"home.arpa",
	"homerouter.cpe",
	"host.docker.internal",
	"huaweimobilewifi.com",
	"localbattle.net",
	"localhost",
	"mobile.hotspot",
	"myfritz.box",
	"ntt.setup",
	"pi.hole",
	"plex.direct",
	"repeater.asus.com",
	"router.asus.com",
	"routerlogin.com",
	"routerlogin.net",
	"samsung.router",
	"speedport.ip",
	"steamloopback.host",
	"tplinkap.net",
	"tplinkeap.net",
	"tplinkmodem.net",
	"tplinkplclogin.net",
	"tplinkrepeater.net",
	"tplinkwifi.net",
	"web.setup.home",
	"web.setup",
}

// blockedSuffixes are domain suffixes that can never be public feeds. Each
// entry carries the exact error message callers and tests expect.
var blockedSuffixes = []struct {
	suffix string
	msg    string
}{
	{".local", "URLs ending with .local are not allowed"},
	{".onion", "URLs ending with .onion are not allowed"},
	{".home.arpa", "URLs ending with .home.arpa are not allowed"},
	// Docker uses host.docker.internal.
	{".internal", "URLs ending with .internal are not allowed"},
	{".localdomain", "URLs ending with .localdomain are not allowed"},
}

// validateHTTPClient performs the final reachability probe. The explicit
// timeout keeps a slow or malicious server from blocking the caller forever
// (http.DefaultClient has no timeout at all).
var validateHTTPClient = &http.Client{Timeout: 10 * time.Second}

// ValidateFeedURL runs some simple validation on a user-submitted feed URL:
// scheme, parseability, non-local/non-reserved domain, DNS resolution and
// reachability. It returns nil when the URL looks safe to fetch.
func ValidateFeedURL(feedURL string) error {
	// Only http and https schemes are accepted.
	if !strings.HasPrefix(feedURL, "http://") && !strings.HasPrefix(feedURL, "https://") {
		return errors.New("URL must start with http:// or https://")
	}

	// Parse the url into a URL structure.
	u, err := url.Parse(feedURL)
	if err != nil {
		return errors.New("failed to parse URL")
	}

	// Extract the domain from the URL.
	domain := strings.TrimSpace(u.Hostname())
	if domain == "" {
		return errors.New("URL does not contain a domain")
	}

	// Literal IP address URLs are not allowed.
	if net.ParseIP(domain) != nil {
		return errors.New("IP address URLs are not allowed")
	}

	// Reject router / LAN hostnames (substring match, as before).
	for _, localURL := range localURLs {
		if strings.Contains(domain, localURL) {
			return errors.New("local URLs are not allowed")
		}
	}

	// Reject reserved, non-public domain suffixes.
	for _, blocked := range blockedSuffixes {
		if strings.HasSuffix(domain, blocked.suffix) {
			return errors.New(blocked.msg)
		}
	}

	// Check that the domain is resolvable.
	ips, err := net.LookupIP(domain)
	if err != nil {
		return errors.New("failed to resolve domain")
	}

	// SSRF hardening: the literal-IP check above does not stop a public
	// hostname from resolving to a loopback or RFC 1918 address, so vet the
	// resolved addresses too before fetching.
	for _, ip := range ips {
		if ip.IsLoopback() || ip.IsPrivate() || ip.IsLinkLocalUnicast() || ip.IsUnspecified() {
			return errors.New("URLs resolving to private addresses are not allowed")
		}
	}

	// Check that the URL is reachable. The response body must be closed or
	// the underlying connection is leaked (the original bare http.Get never
	// closed it and had no timeout).
	resp, err := validateHTTPClient.Get(feedURL)
	if err != nil {
		return errors.New("failed to reach URL")
	}
	resp.Body.Close()

	return nil
}

View file

@ -1,246 +0,0 @@
package main
import (
"testing"
)
// URL starts with http://
//
// NOTE(review): this test (and every other "expect no error" test below)
// drives ValidateFeedURL through a live DNS lookup and HTTP GET against
// example.com, so the suite needs network access and can be flaky offline.
func TestURLStartsWithHTTP(t *testing.T) {
	url := "http://example.com"
	err := ValidateFeedURL(url)
	if err != nil {
		t.Errorf("Expected no error, got %v", err)
	}
}
// URL starts with https://
func TestURLStartsWithHTTPS(t *testing.T) {
	url := "https://example.com"
	err := ValidateFeedURL(url)
	if err != nil {
		t.Errorf("Expected no error, got %v", err)
	}
}
// URL contains a valid domain
// NOTE(review): duplicate of TestURLStartsWithHTTP (same input, same check).
func TestURLContainsValidDomain(t *testing.T) {
	url := "http://example.com"
	err := ValidateFeedURL(url)
	if err != nil {
		t.Errorf("Expected no error, got %v", err)
	}
}
// URL is empty — fails the scheme check before any network use.
func TestURLEmpty(t *testing.T) {
	url := ""
	err := ValidateFeedURL(url)
	if err == nil {
		t.Error("Expected an error, got nil")
	} else if err.Error() != "URL must start with http:// or https://" {
		t.Errorf("Expected error message 'URL must start with http:// or https://', got '%v'", err.Error())
	}
}
// URL does not contain a domain
func TestURLNotNumbers(t *testing.T) {
	url := "12345"
	err := ValidateFeedURL(url)
	if err == nil {
		t.Error("Expected an error, got nil")
	} else if err.Error() != "URL must start with http:// or https://" {
		t.Errorf("Expected error message 'URL must start with http:// or https://', got '%v'", err.Error())
	}
}
// URL is not a valid URL (missing scheme)
func TestURLNotValidURL(t *testing.T) {
	url := "example.com"
	err := ValidateFeedURL(url)
	if err == nil {
		t.Error("Expected an error, got nil")
	} else if err.Error() != "URL must start with http:// or https://" {
		t.Errorf("Expected error message 'URL must start with http:// or https://', got '%v'", err.Error())
	}
}
// Domain is resolvable
// NOTE(review): duplicate of TestURLStartsWithHTTP; requires network.
func TestDomainIsResolvable(t *testing.T) {
	url := "http://example.com"
	err := ValidateFeedURL(url)
	if err != nil {
		t.Errorf("Expected no error, got %v", err)
	}
}
// Domain does not end with .local
// NOTE(review): duplicate of TestURLStartsWithHTTP; requires network.
func TestDomainDoesNotEndWithLocal(t *testing.T) {
	url := "http://example.com"
	err := ValidateFeedURL(url)
	if err != nil {
		t.Errorf("Expected no error, got %v", err)
	}
}
// Domain is not localhost
// NOTE(review): duplicate of TestURLStartsWithHTTP; requires network.
func TestDomainIsNotLocalhost(t *testing.T) {
	url := "http://example.com"
	err := ValidateFeedURL(url)
	if err != nil {
		t.Errorf("Expected no error, got %v", err)
	}
}
// Domain is not an IP address
// NOTE(review): duplicate of TestURLStartsWithHTTP; requires network.
func TestDomainIsNotIPAddress(t *testing.T) {
	url := "http://example.com"
	err := ValidateFeedURL(url)
	if err != nil {
		t.Errorf("Expected no error, got %v", err)
	}
}
// URL is a file path — rejected by the scheme check.
func TestURLIsFilePath(t *testing.T) {
	url := "/path/to/file"
	err := ValidateFeedURL(url)
	if err == nil {
		t.Error("Expected an error, got nil")
	} else if err.Error() != "URL must start with http:// or https://" {
		t.Errorf("Expected error message 'URL must start with http:// or https://', got '%v'", err.Error())
	}
}
// URL is a relative path — rejected by the scheme check.
func TestURLIsRelativePath(t *testing.T) {
	url := "/path/to/resource"
	err := ValidateFeedURL(url)
	if err == nil {
		t.Error("Expected an error, got nil")
	} else if err.Error() != "URL must start with http:// or https://" {
		t.Errorf("Expected error message 'URL must start with http:// or https://', got '%v'", err.Error())
	}
}
// URL is a non-existent domain
// NOTE(review): requires network for the (failing) DNS lookup; assumes the
// random-looking domain is never registered.
func TestNonExistentDomainURL(t *testing.T) {
	url := "http://jfsalksajlkfsajklfsajklfllfjffffkfsklslsksassflfskjlfjlfsjkalfsaf.com"
	err := ValidateFeedURL(url)
	if err == nil {
		t.Error("Expected an error, got nil")
	} else if err.Error() != "failed to resolve domain" {
		t.Errorf("Expected error message 'failed to resolve domain', got '%v'", err.Error())
	}
}
// URL is a malformed URL
func TestMalformedURL(t *testing.T) {
	url := "malformedurl"
	err := ValidateFeedURL(url)
	if err == nil {
		t.Error("Expected an error, got nil")
	} else if err.Error() != "URL must start with http:// or https://" {
		t.Errorf("Expected error message 'URL must start with http:// or https://', got '%v'", err.Error())
	}
}
// URL is a domain that does not support HTTP/HTTPS
func TestURLDomainNotSupportHTTP(t *testing.T) {
	url := "ftp://example.com"
	err := ValidateFeedURL(url)
	if err == nil {
		t.Error("Expected an error, got nil")
	} else if err.Error() != "URL must start with http:// or https://" {
		t.Errorf("Expected error message 'URL must start with http:// or https://', got '%v'", err.Error())
	}
}
// URL is an unreachable domain
// NOTE(review): despite the name this actually exercises the DNS failure
// path, not the HTTP "failed to reach URL" path; requires network.
func TestUnreachableDomain(t *testing.T) {
	url := "http://fafsffsfsfsfsafsasafassfs.com"
	err := ValidateFeedURL(url)
	if err == nil {
		t.Error("Expected an error, got nil")
	} else if err.Error() != "failed to resolve domain" {
		t.Errorf("Expected error message 'failed to resolve domain', got '%v'", err.Error())
	}
}
// URL is an IP address — rejected before any network use.
func TestURLIsIPAddress(t *testing.T) {
	url := "http://84.55.107.42"
	err := ValidateFeedURL(url)
	if err == nil {
		t.Error("Expected an error, got nil")
	} else if err.Error() != "IP address URLs are not allowed" {
		t.Errorf("Expected error message 'IP address URLs are not allowed', got '%v'", err.Error())
	}
}
// URL ends with .local — rejected before any network use.
func TestURLEndsWithLocal(t *testing.T) {
	url := "http://example.local"
	err := ValidateFeedURL(url)
	if err == nil {
		t.Error("Expected an error, got nil")
	} else if err.Error() != "URLs ending with .local are not allowed" {
		t.Errorf("Expected error message 'URLs ending with .local are not allowed', got '%v'", err.Error())
	}
}
// Every router/LAN hostname in the block list must be rejected with the
// "local URLs are not allowed" message. This list mirrors (most of) the
// localURLs table in validation.go; the substring match fires before DNS,
// so no network access is needed here.
func TestLocalURLs(t *testing.T) {
	localURLs := []string{
		"https://localhost",
		"https://home.arpa",
		"https://airbox.home",
		"https://airport",
		"https://arcor.easybox",
		"https://aterm.me",
		"https://bthub.home",
		"https://bthomehub.home",
		"https://congstar.box",
		"https://connect.box",
		"https://console.gl-inet.com",
		"https://easy.box",
		"https://etxr",
		"https://fire.walla",
		"https://fritz.box",
		"https://fritz.nas",
		"https://fritz.repeater",
		"https://giga.cube",
		"https://hi.link",
		"https://hitronhub.home",
		"https://homerouter.cpe",
		"https://huaweimobilewifi.com",
		"https://localbattle.net",
		"https://myfritz.box",
		"https://mobile.hotspot",
		"https://ntt.setup",
		"https://pi.hole",
		"https://plex.direct",
		"https://repeater.asus.com",
		"https://router.asus.com",
		"https://routerlogin.com",
		"https://routerlogin.net",
		"https://samsung.router",
		"https://speedport.ip",
		"https://steamloopback.host",
		"https://tplinkap.net",
		"https://tplinkeap.net",
		"https://tplinkmodem.net",
		"https://tplinkplclogin.net",
		"https://tplinkrepeater.net",
		"https://tplinkwifi.net",
		"https://web.setup",
		"https://web.setup.home",
	}
	for _, localURL := range localURLs {
		err := ValidateFeedURL(localURL)
		if err == nil {
			t.Errorf("Expected an error for local URL %s, got nil", localURL)
		} else if err.Error() != "local URLs are not allowed" {
			t.Errorf("Expected error message 'local URLs are not allowed', got '%v'", err.Error())
		}
	}
}