Refactor folder structure

This commit is contained in:
Joakim Hellsén 2024-02-13 21:21:08 +01:00
commit c7cca02ca7
30 changed files with 610 additions and 933 deletions

View file

View file

@ -79,10 +79,5 @@
"webmail", "webmail",
"XOXO", "XOXO",
"zerolog" "zerolog"
], ]
"terminal.integrated.env.windows": {
"GOOSE_DRIVER": "postgres",
"GOOSE_DBSTRING": "user=feedvault password=feedvault dbname=feedvault sslmode=disable",
"GOOSE_MIGRATION_DIR": "${workspaceFolder}/sql/schema"
}
} }

View file

@ -1,43 +0,0 @@
# Contributing to FeedVault
Feel free to create a pull request for things like bug fixes, new features, and improvements. Your pull request doesn't have to be perfect, it just needs to work (or show what you're thinking). I can help you with the rest if needed. If you're not sure about something, feel free to open an issue first to discuss it.
Please don't add any dependencies unless it's absolutely necessary. I want to try to keep the project using the standard library as much as possible.
We use GitHub issues for tracking requests and bugs, so feel free to open an issue if you have any questions or need help.
Thank you for your contributions!
## Running the project
You can run the project using the following command:
```bash
go run cmd/feedvault/main.go
```
You can also run the tests using:
```bash
go test ./...
```
## Using Docker
We have a [Docker.md](Docker.md) file with instructions on how to run the project using Docker.
## Using sqlc and goose
I use [sqlc](https://docs.sqlc.dev/en/latest/index.html) for generating type safe Go from SQL. Make sure to regenerate the code after changing any SQL queries:
```bash
sqlc generate
```
[goose](https://pressly.github.io/goose/) is used for managing database migrations. To create a new migration, run:
```bash
goose create add_some_column sql
goose status
goose up
```

View file

@ -1,31 +0,0 @@
# Docker Compose
You can run the project using Docker Compose. You can use the following commands to build, run, and stop the project:
```bash
docker compose build
docker compose up
docker compose down
```
## Accessing the database
```bash
docker compose exec db psql -U feedvault -d feedvault
```
## Environment variables
You can use the following environment variables to configure the project:
- `PORT`: The port to listen on (default: `8000`)
- `DATABASE_URL`: The URL of the database (default: `postgres://feedvault:feedvault@db/feedvault?sslmode=disable`)
- FeedVault only supports PostgreSQL at the moment
- `ADMIN_EMAIL`: The email address that error reports are sent to.
- `EMAIL_HOST_USER`: The email address to send emails from.
- `EMAIL_HOST_PASSWORD`: The password for the email address to send emails from.
- `EMAIL_HOST`: The SMTP server to send emails through. (default: `smtp.gmail.com`)
- `EMAIL_PORT`: The port to send emails through. (default: `587`)
- `DISCORD_WEBHOOK_URL`: The Discord webhook URL to send messages to.
- `APP_ENV`: The environment the app is running in. Development or Production. (default: `development`)
- `USER_AGENT`: The user agent to use for making requests. (default: `None`)

36
cmd/feedvault/main.go Normal file
View file

@ -0,0 +1,36 @@
package main
import (
	"errors"
	"log"
	"net/http"
	"time"

	"github.com/TheLovinator1/FeedVault/pkg/handlers"
)
func init() { log.SetFlags(log.LstdFlags | log.Lshortfile) }
// main wires up the HTTP routes and serves FeedVault on 127.0.0.1:8000
// until the process exits.
func main() {
	log.Print("Starting server")

	// Create a new ServeMux
	mux := http.NewServeMux()

	// Routes
	mux.HandleFunc("/", handlers.IndexHandler)
	mux.HandleFunc("/api", handlers.ApiHandler)
	mux.HandleFunc("/feeds", handlers.FeedsHandler)
	mux.HandleFunc("/add", handlers.AddFeedHandler)
	mux.HandleFunc("/upload_opml", handlers.UploadOpmlHandler)

	// Create server. http.Server has no timeouts by default, so a slow or
	// stalled client could hold a connection open forever; set explicit
	// read/write deadlines.
	server := &http.Server{
		Addr:         "127.0.0.1:8000",
		Handler:      mux,
		ReadTimeout:  10 * time.Second,
		WriteTimeout: 30 * time.Second,
	}

	log.Print("Server started on http://localhost:8000/ <Ctrl-C> to stop")

	// ErrServerClosed is the normal result of a graceful Shutdown/Close,
	// not a failure; errors.Is also matches wrapped errors.
	if err := server.ListenAndServe(); err != nil && !errors.Is(err, http.ErrServerClosed) {
		log.Fatalf("ListenAndServe(): %v", err)
	}
}

17
go.mod
View file

@ -1,20 +1,3 @@
module github.com/TheLovinator1/FeedVault module github.com/TheLovinator1/FeedVault
go 1.22.0 go 1.22.0
require (
github.com/stretchr/testify v1.8.4
github.com/tdewolff/minify/v2 v2.20.16
gorm.io/driver/sqlite v1.5.5
gorm.io/gorm v1.25.7
)
require (
github.com/davecgh/go-spew v1.1.1 // indirect
github.com/jinzhu/inflection v1.0.0 // indirect
github.com/jinzhu/now v1.1.5 // indirect
github.com/mattn/go-sqlite3 v1.14.22 // indirect
github.com/pmezard/go-difflib v1.0.0 // indirect
github.com/tdewolff/parse/v2 v2.7.11 // indirect
gopkg.in/yaml.v3 v3.0.1 // indirect
)

27
go.sum
View file

@ -1,27 +0,0 @@
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/jinzhu/inflection v1.0.0 h1:K317FqzuhWc8YvSVlFMCCUb36O/S9MCKRDI7QkRKD/E=
github.com/jinzhu/inflection v1.0.0/go.mod h1:h+uFLlag+Qp1Va5pdKtLDYj+kHp5pxUVkryuEj+Srlc=
github.com/jinzhu/now v1.1.5 h1:/o9tlHleP7gOFmsnYNz3RGnqzefHA47wQpKrrdTIwXQ=
github.com/jinzhu/now v1.1.5/go.mod h1:d3SSVoowX0Lcu0IBviAWJpolVfI5UJVZZ7cO71lE/z8=
github.com/mattn/go-sqlite3 v1.14.22 h1:2gZY6PC6kBnID23Tichd1K+Z0oS6nE/XwU+Vz/5o4kU=
github.com/mattn/go-sqlite3 v1.14.22/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk=
github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
github.com/tdewolff/minify/v2 v2.20.16 h1:/C8dtRkxLTIyUlKlBz46gDiktCrE8a6+c1gTrnPFz+U=
github.com/tdewolff/minify/v2 v2.20.16/go.mod h1:/FvxV9KaTrFu35J9I2FhRvWSBxcHj8sDSdwBFh5voxM=
github.com/tdewolff/parse/v2 v2.7.11 h1:v+W45LnzmjndVlfqPCT5gGjAAZKd1GJGOPJveTIkBY8=
github.com/tdewolff/parse/v2 v2.7.11/go.mod h1:3FbJWZp3XT9OWVN3Hmfp0p/a08v4h8J9W1aghka0soA=
github.com/tdewolff/test v1.0.11-0.20231101010635-f1265d231d52/go.mod h1:6DAvZliBAAnD7rhVgwaM7DE5/d9NMOAJ09SqYqeK4QE=
github.com/tdewolff/test v1.0.11-0.20240106005702-7de5f7df4739 h1:IkjBCtQOOjIn03u/dMQK9g+Iw9ewps4mCl1nB8Sscbo=
github.com/tdewolff/test v1.0.11-0.20240106005702-7de5f7df4739/go.mod h1:XPuWBzvdUzhCuxWO1ojpXsyzsA5bFoS3tO/Q3kFuTG8=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gorm.io/driver/sqlite v1.5.5 h1:7MDMtUZhV065SilG62E0MquljeArQZNfJnjd9i9gx3E=
gorm.io/driver/sqlite v1.5.5/go.mod h1:6NgQ7sQWAIFsPrJJl1lSNSu2TABh0ZZ/zm5fosATavE=
gorm.io/gorm v1.25.7 h1:VsD6acwRjz2zFxGO50gPO6AkNs7KKnvfzUjHQhZDz/A=
gorm.io/gorm v1.25.7/go.mod h1:hbnx/Oo0ChWMn1BIhpy1oYozzpM15i4YPuHDmfYtwg8=

View file

@ -1,53 +0,0 @@
package main
import (
"strings"
"testing"
)
// TestMinifyHTML checks that minifyHTML (defined elsewhere in this
// package) strips optional tags and structural markup, leaving only the
// meaningful elements of the input document.
func TestMinifyHTML(t *testing.T) {
	input := "<html><head><title>Test</title></head><body><h1>Hello, World!</h1></body></html>"
	expected := "<title>Test</title><h1>Hello, World!</h1>"
	result := minifyHTML(input)
	if result != expected {
		t.Errorf("Expected minified HTML: %s, but got: %s", expected, result)
	}
}
// TestMinifyCSS checks that minifyCSS collapses whitespace and drops the
// trailing semicolon of the last declaration in a rule block.
func TestMinifyCSS(t *testing.T) {
	cssString := `
body {
background-color: red;
color: blue;
}
`
	expected := "body{background-color:red;color:blue}"
	result := minifyCSS(cssString)
	if result != expected {
		t.Errorf("Expected minified CSS string to be %s, but got %s", expected, result)
	}
}
// TestErrorMessages checks that fullHTML renders the Msg text of every
// error ParseResult somewhere in the generated page.
func TestErrorMessages(t *testing.T) {
	// Initialize test data: two parse failures with distinct messages.
	parseResult := []ParseResult{
		{IsError: true, Msg: "Error 1"},
		{IsError: true, Msg: "Error 2"},
	}
	h := HTMLData{
		ParseResult: parseResult,
	}

	// Invoke function under test
	result := fullHTML(h)

	// Assert that the result contains the error messages
	if !strings.Contains(result, "Error 1") || !strings.Contains(result, "Error 2") {
		t.Errorf("Expected error messages, but got: %s", result)
	}
}

55
main.go
View file

@ -1,55 +0,0 @@
package main
import (
"log"
"net/http"
"gorm.io/driver/sqlite"
"gorm.io/gorm"
)
var db *gorm.DB
// init opens (or creates) the SQLite database "feedvault.db" and runs the
// schema migration before main starts. Any failure panics so the server
// never runs against a broken database.
func init() {
	var err error
	db, err = gorm.Open(sqlite.Open("feedvault.db"), &gorm.Config{})
	if err != nil {
		panic("Failed to connect to database")
	}
	// Defensive double-check; gorm.Open returning (nil, nil) is not
	// expected, but the cost of checking is trivial.
	if db == nil {
		panic("db nil")
	}
	log.Println("Connected to database")

	// Migrate the schema: create/alter tables for every model type.
	err = db.AutoMigrate(&Feed{}, &Item{}, &Person{}, &Image{}, &Enclosure{}, &DublinCoreExtension{}, &ITunesFeedExtension{}, &ITunesItemExtension{}, &ITunesCategory{}, &ITunesOwner{}, &Extension{})
	if err != nil {
		panic("Failed to migrate the database")
	}
}
// main registers the HTTP routes and serves FeedVault on 127.0.0.1:8000
// until the process exits.
func main() {
	log.Println("Starting FeedVault...")

	// Create a new ServeMux
	mux := http.NewServeMux()

	// Routes
	mux.HandleFunc("/", IndexHandler)
	mux.HandleFunc("/api", ApiHandler)
	mux.HandleFunc("/feeds", FeedsHandler)
	mux.HandleFunc("/add", AddFeedHandler)
	mux.HandleFunc("/upload_opml", UploadOpmlHandler)

	// Create server
	server := &http.Server{
		Addr:    "127.0.0.1:8000",
		Handler: mux,
	}

	log.Println("Listening on http://localhost:8000/ <Ctrl-C> to stop")
	// ErrServerClosed is the normal result of a graceful shutdown, so it
	// is not treated as a fatal error.
	if err := server.ListenAndServe(); err != nil && err != http.ErrServerClosed {
		log.Fatalf("Server error: %v", err)
	}
}

180
models.go
View file

@ -1,180 +0,0 @@
package main
import (
"net/http"
"time"
"gorm.io/gorm"
)
// Feed is the GORM model for an archived web feed. The fields mirror a
// parsed feed (title, links, authors, extensions, items); persistence
// details live in the gorm struct tags.
// NOTE(review): several associations use `gorm:"foreignKey:ID"`, which
// points the association at the primary key — confirm this is intended.
type Feed struct {
	gorm.Model
	Title           string               `json:"title,omitempty"`
	Description     string               `json:"description,omitempty"`
	Link            string               `json:"link,omitempty"`
	FeedLink        string               `json:"feedLink,omitempty"`
	Links           []string             `gorm:"type:text[]" json:"links,omitempty"`
	Updated         string               `json:"updated,omitempty"`
	UpdatedParsed   *time.Time           `json:"updatedParsed,omitempty"`
	Published       string               `json:"published,omitempty"`
	PublishedParsed *time.Time           `json:"publishedParsed,omitempty"`
	Authors         []*Person            `gorm:"many2many:feed_authors;" json:"authors,omitempty"`
	Language        string               `json:"language,omitempty"`
	Image           *Image               `gorm:"foreignKey:ID" json:"image,omitempty"`
	Copyright       string               `json:"copyright,omitempty"`
	Generator       string               `json:"generator,omitempty"`
	Categories      []string             `gorm:"type:text[]" json:"categories,omitempty"`
	DublinCoreExt   *DublinCoreExtension `gorm:"foreignKey:ID" json:"dcExt,omitempty"`
	ITunesExt       *ITunesFeedExtension `gorm:"foreignKey:ID" json:"itunesExt,omitempty"`
	Extensions      Extensions           `gorm:"type:json" json:"extensions,omitempty"`
	Custom          map[string]string    `gorm:"type:json" json:"custom,omitempty"`
	Items           []*Item              `gorm:"foreignKey:ID" json:"items,omitempty"`
	FeedType        string               `json:"feedType"`
	FeedVersion     string               `json:"feedVersion"`
}
// Item is the GORM model for a single entry/article within a Feed.
// NOTE(review): ITunesExt is typed *ITunesFeedExtension even though an
// ITunesItemExtension type exists below — confirm which was intended.
type Item struct {
	gorm.Model
	Title           string               `json:"title,omitempty"`
	Description     string               `json:"description,omitempty"`
	Content         string               `json:"content,omitempty"`
	Link            string               `json:"link,omitempty"`
	Links           []string             `gorm:"type:text[]" json:"links,omitempty"`
	Updated         string               `json:"updated,omitempty"`
	UpdatedParsed   *time.Time           `json:"updatedParsed,omitempty"`
	Published       string               `json:"published,omitempty"`
	PublishedParsed *time.Time           `json:"publishedParsed,omitempty"`
	Authors         []*Person            `gorm:"many2many:item_authors;" json:"authors,omitempty"`
	GUID            string               `json:"guid,omitempty"`
	Image           *Image               `gorm:"foreignKey:ID" json:"image,omitempty"`
	Categories      []string             `gorm:"type:text[]" json:"categories,omitempty"`
	Enclosures      []*Enclosure         `gorm:"foreignKey:ID" json:"enclosures,omitempty"`
	DublinCoreExt   *DublinCoreExtension `gorm:"foreignKey:ID" json:"dcExt,omitempty"`
	ITunesExt       *ITunesFeedExtension `gorm:"foreignKey:ID" json:"itunesExt,omitempty"`
	Extensions      Extensions           `gorm:"type:json" json:"extensions,omitempty"`
	Custom          map[string]string    `gorm:"type:json" json:"custom,omitempty"`
}

// Person is an author or contributor attached to a feed or item.
type Person struct {
	gorm.Model
	Name  string `json:"name,omitempty"`
	Email string `json:"email,omitempty"`
}

// TableName overrides GORM's default table name for Person.
// NOTE(review): "feed_authors" is also the many2many join-table name used
// by Feed.Authors above — confirm this overlap is intentional.
func (Person) TableName() string {
	return "feed_authors"
}

// Image is an image attached to a feed or item.
type Image struct {
	gorm.Model
	URL   string `json:"url,omitempty"`
	Title string `json:"title,omitempty"`
}

// Enclosure is a media attachment (e.g. podcast audio) on an item.
type Enclosure struct {
	gorm.Model
	URL    string `json:"url,omitempty"`
	Length string `json:"length,omitempty"`
	Type   string `json:"type,omitempty"`
}
// DublinCoreExtension holds Dublin Core metadata elements for a feed or
// item; each element may repeat, hence the string slices.
type DublinCoreExtension struct {
	gorm.Model
	Title       []string `gorm:"type:text[]" json:"title,omitempty"`
	Creator     []string `gorm:"type:text[]" json:"creator,omitempty"`
	Author      []string `gorm:"type:text[]" json:"author,omitempty"`
	Subject     []string `gorm:"type:text[]" json:"subject,omitempty"`
	Description []string `gorm:"type:text[]" json:"description,omitempty"`
	Publisher   []string `gorm:"type:text[]" json:"publisher,omitempty"`
	Contributor []string `gorm:"type:text[]" json:"contributor,omitempty"`
	Date        []string `gorm:"type:text[]" json:"date,omitempty"`
	Type        []string `gorm:"type:text[]" json:"type,omitempty"`
	Format      []string `gorm:"type:text[]" json:"format,omitempty"`
	Identifier  []string `gorm:"type:text[]" json:"identifier,omitempty"`
	Source      []string `gorm:"type:text[]" json:"source,omitempty"`
	Language    []string `gorm:"type:text[]" json:"language,omitempty"`
	Relation    []string `gorm:"type:text[]" json:"relation,omitempty"`
	Coverage    []string `gorm:"type:text[]" json:"coverage,omitempty"`
	Rights      []string `gorm:"type:text[]" json:"rights,omitempty"`
}

// ITunesFeedExtension holds podcast (iTunes) metadata at the feed level.
type ITunesFeedExtension struct {
	gorm.Model
	Author     string            `json:"author,omitempty"`
	Block      string            `json:"block,omitempty"`
	Categories []*ITunesCategory `gorm:"many2many:feed_itunes_categories;" json:"categories,omitempty"`
	Explicit   string            `json:"explicit,omitempty"`
	Keywords   string            `json:"keywords,omitempty"`
	Owner      *ITunesOwner      `gorm:"foreignKey:ID" json:"owner,omitempty"`
	Subtitle   string            `json:"subtitle,omitempty"`
	Summary    string            `json:"summary,omitempty"`
	Image      string            `json:"image,omitempty"`
	Complete   string            `json:"complete,omitempty"`
	NewFeedURL string            `json:"newFeedUrl,omitempty"`
	Type       string            `json:"type,omitempty"`
}

// ITunesItemExtension holds podcast (iTunes) metadata at the episode level.
type ITunesItemExtension struct {
	gorm.Model
	Author            string `json:"author,omitempty"`
	Block             string `json:"block,omitempty"`
	Duration          string `json:"duration,omitempty"`
	Explicit          string `json:"explicit,omitempty"`
	Keywords          string `json:"keywords,omitempty"`
	Subtitle          string `json:"subtitle,omitempty"`
	Summary           string `json:"summary,omitempty"`
	Image             string `json:"image,omitempty"`
	IsClosedCaptioned string `json:"isClosedCaptioned,omitempty"`
	Episode           string `json:"episode,omitempty"`
	Season            string `json:"season,omitempty"`
	Order             string `json:"order,omitempty"`
	EpisodeType       string `json:"episodeType,omitempty"`
}

// ITunesCategory is a (possibly nested) iTunes podcast category.
type ITunesCategory struct {
	gorm.Model
	Text        string          `json:"text,omitempty"`
	Subcategory *ITunesCategory `gorm:"many2many:feed_itunes_categories;" json:"subcategory,omitempty"`
}

// TableName overrides GORM's default table name for ITunesCategory.
func (ITunesCategory) TableName() string {
	return "feed_itunes_categories"
}

// ITunesOwner identifies the owner contact of a podcast feed.
type ITunesOwner struct {
	gorm.Model
	Email string `json:"email,omitempty"`
	Name  string `json:"name,omitempty"`
}

// Extensions maps extension namespace -> element name -> parsed elements.
type Extensions map[string]map[string][]Extension

// Extension is a generic parsed XML extension element with its attributes
// and nested children.
type Extension struct {
	gorm.Model
	Name     string                 `json:"name"`
	Value    string                 `json:"value"`
	Attrs    map[string]string      `gorm:"type:json" json:"attrs"`
	Children map[string][]Extension `gorm:"type:json" json:"children"`
}
// TemplateData holds the values shared by every rendered page.
type TemplateData struct {
	Title        string
	Description  string
	Keywords     string
	Author       string
	CanonicalURL string
	FeedCount    int
	DatabaseSize string
	Request      *http.Request
	ParseErrors  []ParseResult
}

// ParseResult records the outcome of processing a single feed URL.
type ParseResult struct {
	FeedURL string // the URL that was processed
	Msg     string // human-readable outcome or error text
	IsError bool   // true when Msg describes a failure
}

// GetDatabaseSizeAndFeedCount refreshes DatabaseSize via GetDBSize.
// NOTE(review): despite the name, FeedCount is not updated here.
func (d *TemplateData) GetDatabaseSizeAndFeedCount() {
	d.DatabaseSize = GetDBSize()
}

20
pkg/handlers/api.go Normal file
View file

@ -0,0 +1,20 @@
package handlers
import (
"net/http"
"github.com/TheLovinator1/FeedVault/pkg/html"
)
// ApiHandler renders a placeholder page for the FeedVault API endpoint.
// The request parameter is unnamed because it is not used.
func ApiHandler(w http.ResponseWriter, _ *http.Request) {
	htmlData := html.HTMLData{
		Title:        "FeedVault API",
		Description:  "FeedVault API - A feed archive",
		Keywords:     "RSS, Atom, Feed, Archive, API",
		Author:       "TheLovinator",
		CanonicalURL: "http://localhost:8000/api",
		Content:      "<p>Here be dragons.</p>",
	}

	// "page" rather than "html" so the local does not shadow the imported
	// html package.
	page := html.FullHTML(htmlData)
	w.Header().Set("Content-Type", "text/html; charset=utf-8")
	// Best-effort write: a failure here means the client went away and
	// there is nothing actionable left to do.
	_, _ = w.Write([]byte(page))
}

64
pkg/handlers/feeds.go Normal file
View file

@ -0,0 +1,64 @@
package handlers
import (
"log"
"net/http"
"strings"
"github.com/TheLovinator1/FeedVault/pkg/html"
"github.com/TheLovinator1/FeedVault/pkg/models"
"github.com/TheLovinator1/FeedVault/pkg/validate"
)
// FeedsHandler renders the (placeholder) feed-listing page.
// The request parameter is unnamed because it is not used.
func FeedsHandler(w http.ResponseWriter, _ *http.Request) {
	htmlData := html.HTMLData{
		Title:        "FeedVault Feeds",
		Description:  "FeedVault Feeds - A feed archive",
		Keywords:     "RSS, Atom, Feed, Archive",
		Author:       "TheLovinator",
		CanonicalURL: "http://localhost:8000/feeds",
		Content:      "<p>Here be feeds.</p>",
	}

	// "page" rather than "html" so the local does not shadow the imported
	// html package.
	page := html.FullHTML(htmlData)
	w.Header().Set("Content-Type", "text/html; charset=utf-8")
	// Best-effort write: nothing actionable if the client disconnected.
	_, _ = w.Write([]byte(page))
}
// AddFeedHandler reads newline-separated feed URLs from the submitted
// form, validates each one, and renders a page with per-URL results.
func AddFeedHandler(w http.ResponseWriter, r *http.Request) {
	var parseErrors []models.ParseResult

	// Parse the form and get the URLs
	if err := r.ParseForm(); err != nil {
		http.Error(w, "Could not parse form", http.StatusBadRequest)
		return
	}
	urls := r.Form.Get("urls")
	if urls == "" {
		http.Error(w, "No URLs provided", http.StatusBadRequest)
		return
	}

	for _, feedURL := range strings.Split(urls, "\n") {
		// Browsers submit textarea content with CRLF line endings, so each
		// line would otherwise carry a trailing "\r"; trim surrounding
		// whitespace and skip blank lines instead of reporting them as
		// invalid URLs.
		feedURL = strings.TrimSpace(feedURL)
		if feedURL == "" {
			continue
		}

		// TODO: Try to upgrade to https if http is provided

		// Validate the URL
		if err := validate.ValidateFeedURL(feedURL); err != nil {
			parseErrors = append(parseErrors, models.ParseResult{FeedURL: feedURL, Msg: err.Error(), IsError: true})
			continue
		}

		// "Add" the feed to the database
		log.Println("Adding feed:", feedURL)
		parseErrors = append(parseErrors, models.ParseResult{FeedURL: feedURL, Msg: "Added", IsError: false})
	}

	htmlData := html.HTMLData{
		Title:        "FeedVault",
		Description:  "FeedVault - A feed archive",
		Keywords:     "RSS, Atom, Feed, Archive",
		Author:       "TheLovinator",
		CanonicalURL: "http://localhost:8000/",
		Content:      "<p>Feeds added.</p>",
		ParseResult:  parseErrors,
	}
	page := html.FullHTML(htmlData)
	w.Header().Set("Content-Type", "text/html; charset=utf-8")
	// Best-effort write: nothing actionable if the client disconnected.
	_, _ = w.Write([]byte(page))
}

79
pkg/handlers/index.go Normal file
View file

@ -0,0 +1,79 @@
package handlers
import (
"net/http"
"github.com/TheLovinator1/FeedVault/pkg/html"
)
// IndexHandler renders the landing page: an intro, the add-feeds form,
// the OPML upload form, and a static FAQ section.
// The request parameter is unnamed because it is not used.
func IndexHandler(w http.ResponseWriter, _ *http.Request) {
	// Intro plus the two submission forms (raw URLs and OPML upload).
	content := `<h2>Feeds to archive</h2>
<p>
Input the URLs of the feeds you wish to archive below. You can add as many as needed, and access them through the website or API. Alternatively, include links to .opml files, and the feeds within will be archived.
</p>
<form action="/add" method="post">
<textarea id="urls" name="urls" rows="5" cols="50" required></textarea>
<button type="submit">Add feeds</button>
</form>
<br>
<p>You can also upload .opml files containing the feeds you wish to archive:</p>
<form enctype="multipart/form-data" method="post" action="/upload_opml">
<input type="file" name="file" id="file" accept=".opml" required>
<button type="submit">Upload OPML</button>
</form>
`
	// Static FAQ rendered with <details>/<summary> disclosure widgets.
	FAQ := `
<h2>FAQ</h2>
<details>
<summary>What are web feeds?</summary>
<p>
Web feeds are a way to distribute content on the web. They allow users to access updates from websites without having to visit them directly. Feeds are typically used for news websites, blogs, and other sites that frequently update content.
<br>
You can read more about web feeds on <a href="https://en.wikipedia.org/wiki/Web_feed">Wikipedia</a>.
</p>
<hr>
</details>
<details>
<summary>What is FeedVault?</summary>
<p>
FeedVault is a service that archives web feeds. It allows users to access and search for historical content from various websites. The service is designed to preserve the history of the web and provide a reliable source for accessing content that may no longer be available on the original websites.
</p>
<hr>
</details>
<details>
<summary>Why archive feeds?</summary>
<p>
Web feeds are a valuable source of information, and archiving them ensures that the content is preserved for future reference. By archiving feeds, we can ensure that historical content is available for research, analysis, and other purposes. Additionally, archiving feeds can help prevent the loss of valuable information due to website changes, outages, or other issues.
</p>
<hr>
</details>
<details>
<summary>How does it work?</summary>
<p>
FeedVault is written in Go and uses the <a href="https://github.com/mmcdole/gofeed">gofeed</a> library to parse feeds. The service periodically checks for new content in the feeds and stores it in a database. Users can access the archived feeds through the website or API.
<hr>
</details>
<details>
<summary>How can I access the archived feeds?</summary>
<p>
You can access the archived feeds through the website or API. The website provides a user interface for searching and browsing the feeds, while the API allows you to access the feeds programmatically. You can also download the feeds in various formats, such as JSON, XML, or RSS.
</p>
</details>
`
	content += FAQ

	htmlData := html.HTMLData{
		Title:        "FeedVault",
		Description:  "FeedVault - A feed archive",
		Keywords:     "RSS, Atom, Feed, Archive",
		Author:       "TheLovinator",
		CanonicalURL: "http://localhost:8000/",
		Content:      content,
	}
	// NOTE(review): the local variable shadows the imported "html" package
	// and the Write error is silently ignored — same pattern as the other
	// handlers; consider aligning with their fixes.
	html := html.FullHTML(htmlData)
	w.Write([]byte(html))
}

63
pkg/handlers/opml.go Normal file
View file

@ -0,0 +1,63 @@
package handlers
import (
"io"
"log"
"net/http"
"github.com/TheLovinator1/FeedVault/pkg/html"
"github.com/TheLovinator1/FeedVault/pkg/models"
"github.com/TheLovinator1/FeedVault/pkg/opml"
"github.com/TheLovinator1/FeedVault/pkg/validate"
)
// UploadOpmlHandler accepts an uploaded OPML file, extracts the feed URLs
// it contains, validates each one, and renders a page with per-URL
// results.
func UploadOpmlHandler(w http.ResponseWriter, r *http.Request) {
	// Parse the form and get the file. Keep at most 10 MB in memory; the
	// error was previously ignored, hiding malformed multipart bodies.
	if err := r.ParseMultipartForm(10 << 20); err != nil {
		http.Error(w, "Could not parse upload", http.StatusBadRequest)
		return
	}
	file, _, err := r.FormFile("file")
	if err != nil {
		http.Error(w, "No file provided", http.StatusBadRequest)
		return
	}
	defer file.Close()

	// Read the file
	all, err := io.ReadAll(file)
	if err != nil {
		http.Error(w, "Failed to read file", http.StatusInternalServerError)
		return
	}

	// Parse the OPML file
	parseResult := []models.ParseResult{}
	links, err := opml.ParseOpml(string(all))
	if err != nil {
		parseResult = append(parseResult, models.ParseResult{FeedURL: "/upload_opml", Msg: err.Error(), IsError: true})
	} else {
		// Add the feeds to the database
		for _, feedURL := range links.XMLLinks {
			log.Println("Adding feed:", feedURL)

			// Validate the URL
			if err := validate.ValidateFeedURL(feedURL); err != nil {
				parseResult = append(parseResult, models.ParseResult{FeedURL: feedURL, Msg: err.Error(), IsError: true})
				continue
			}
			parseResult = append(parseResult, models.ParseResult{FeedURL: feedURL, Msg: "Added", IsError: false})
		}
	}

	// Return the results
	htmlData := html.HTMLData{
		Title:        "FeedVault",
		Description:  "FeedVault - A feed archive",
		Keywords:     "RSS, Atom, Feed, Archive",
		Author:       "TheLovinator",
		CanonicalURL: "http://localhost:8000/",
		Content:      "<p>Feeds added.</p>",
		ParseResult:  parseResult,
	}
	page := html.FullHTML(htmlData)
	w.Header().Set("Content-Type", "text/html; charset=utf-8")
	// Best-effort write: nothing actionable if the client disconnected.
	_, _ = w.Write([]byte(page))
}

View file

@ -1,13 +1,13 @@
package main package html
import ( import (
"fmt" "fmt"
"math/rand" "math/rand"
"strings" "strings"
"github.com/tdewolff/minify/v2" "github.com/TheLovinator1/FeedVault/pkg/models"
"github.com/tdewolff/minify/v2/css" "github.com/TheLovinator1/FeedVault/pkg/quotes"
"github.com/tdewolff/minify/v2/html" "github.com/TheLovinator1/FeedVault/pkg/stats"
) )
type HTMLData struct { type HTMLData struct {
@ -17,27 +17,7 @@ type HTMLData struct {
Author string Author string
CanonicalURL string CanonicalURL string
Content string Content string
ParseResult []ParseResult ParseResult []models.ParseResult
}
func minifyHTML(h string) string {
m := minify.New()
m.AddFunc("text/html", html.Minify)
minified, err := m.String("text/html", h)
if err != nil {
return h
}
return minified
}
func minifyCSS(h string) string {
m := minify.New()
m.AddFunc("text/css", css.Minify)
minified, err := m.String("text/css", h)
if err != nil {
return h
}
return minified
} }
var style = ` var style = `
@ -99,12 +79,12 @@ textarea {
} }
` `
func fullHTML(h HTMLData) string { func FullHTML(h HTMLData) string {
var sb strings.Builder var sb strings.Builder
var errorBuilder strings.Builder var errorBuilder strings.Builder
FeedCount := 0 FeedCount := 0
DatabaseSize := GetDBSize() DatabaseSize := stats.GetDBSize()
// This is the error message that will be displayed if there are any errors // This is the error message that will be displayed if there are any errors
if len(h.ParseResult) > 0 { if len(h.ParseResult) > 0 {
@ -151,7 +131,7 @@ func fullHTML(h HTMLData) string {
sb.WriteString(` sb.WriteString(`
<title>` + h.Title + `</title> <title>` + h.Title + `</title>
<style>` + minifyCSS(style) + `</style> <style>` + style + `</style>
</head> </head>
<body> <body>
` + StatusMsg + ` ` + StatusMsg + `
@ -198,7 +178,7 @@ func fullHTML(h HTMLData) string {
<a href="mailto:hello@feedvault.se">hello@feedvault.se</a> <a href="mailto:hello@feedvault.se">hello@feedvault.se</a>
</div> </div>
<div class="right"> <div class="right">
` + funMsg[rand.Intn(len(funMsg))] + ` ` + quotes.FunMsg[rand.Intn(len(quotes.FunMsg))] + `
</div> </div>
</div> </div>
</small> </small>
@ -206,6 +186,6 @@ func fullHTML(h HTMLData) string {
</body> </body>
</html>`) </html>`)
return minifyHTML(sb.String()) return sb.String()
} }

32
pkg/models/models.go Normal file
View file

@ -0,0 +1,32 @@
package models
import (
"net/http"
"github.com/TheLovinator1/FeedVault/pkg/stats"
)
// TemplateData holds the values shared by every rendered page.
type TemplateData struct {
	Title        string
	Description  string
	Keywords     string
	Author       string
	CanonicalURL string
	FeedCount    int
	DatabaseSize string
	Request      *http.Request
	ParseErrors  []ParseResult
}

// ParseResult records the outcome of validating/adding one feed URL.
type ParseResult struct {
	FeedURL string // the URL that was processed
	Msg     string // human-readable outcome ("Added") or error text
	IsError bool   // true when Msg describes a failure
}
// GetDatabaseSizeAndFeedCount refreshes DatabaseSize via stats.GetDBSize.
// NOTE(review): despite its name it does not yet update FeedCount — see
// the TODOs below.
func (d *TemplateData) GetDatabaseSizeAndFeedCount() {
	// TODO: Get the feed count from the database
	// TODO: Add amount of entries
	// TODO: Add amount of users
	d.DatabaseSize = stats.GetDBSize()
}

View file

@ -1,4 +1,4 @@
package main package opml
import "encoding/xml" import "encoding/xml"
@ -36,7 +36,7 @@ type linksFromOpml struct {
HTMLLinks []string `json:"htmlLinks"` HTMLLinks []string `json:"htmlLinks"`
} }
func removeDuplicates(s []string) []string { func RemoveDuplicates(s []string) []string {
seen := make(map[string]struct{}, len(s)) seen := make(map[string]struct{}, len(s))
j := 0 j := 0
for _, v := range s { for _, v := range s {
@ -87,8 +87,8 @@ func ParseOpml(s string) (linksFromOpml, error) {
} }
// Remove any duplicates // Remove any duplicates
links.XMLLinks = removeDuplicates(links.XMLLinks) links.XMLLinks = RemoveDuplicates(links.XMLLinks)
links.HTMLLinks = removeDuplicates(links.HTMLLinks) links.HTMLLinks = RemoveDuplicates(links.HTMLLinks)
return links, nil return links, nil
} }

View file

@ -1,12 +1,7 @@
package main package quotes
import (
"fmt"
"math/rand"
)
// "Fun" messages that will be displayed in the footer // "Fun" messages that will be displayed in the footer
var funMsg = []string{ var FunMsg = []string{
"Web scraping is not a crime.", "Web scraping is not a crime.",
"Made in Sweden.", "Made in Sweden.",
"🙃", "🙃",
@ -16,14 +11,12 @@ var funMsg = []string{
"A feed in the hand is worth two in the bush.", "A feed in the hand is worth two in the bush.",
"Death begets death begets death.", "Death begets death begets death.",
"I am Eo's dream.", "I am Eo's dream.",
"Through the thorns to the stars.",
"Freedom in an unjust system is no freedom at all.", "Freedom in an unjust system is no freedom at all.",
"Omnis vir lupus.", "Omnis vir lupus.",
"Shit escalates.", "Shit escalates.",
"Break the chains, my love.", "Break the chains, my love.",
"Sharpened by hate. Strengthened by love.", "Sharpened by hate. Strengthened by love.",
"Hic sunt leones.", "Hic sunt leones.",
"The Reaper has come. And he's brought hell with him.",
"Keyboard not found. Press F1 to continue.", "Keyboard not found. Press F1 to continue.",
"The stars shine brighter when shared among comrades.", "The stars shine brighter when shared among comrades.",
"Zzz... 🛌", "Zzz... 🛌",
@ -31,26 +24,12 @@ var funMsg = []string{
"Open source, open heart.", "Open source, open heart.",
"RSS is the new black.", "RSS is the new black.",
"Unsubscribe.", "Unsubscribe.",
"Copyright © 2004-2021 Microsoft Corporation.",
"ChatGPT made 99% of this website :-)", "ChatGPT made 99% of this website :-)",
fmt.Sprintf("%d is the year of the Linux desktop.", 2024+rand.Intn(100)),
":-)",
":^)",
"( ͡° ͜ʖ ͡°)",
"pls seed",
"My life for Aiur!",
"For the swarm!",
"Do not the cat.",
"hal[",
"Meow?",
"Rawr!",
"👁️👄👁️", "👁️👄👁️",
"From each planet, to each star—equality in the cosmos.", "From each planet, to each star—equality in the cosmos.",
"In the vastness of space, no one should own more than they can share.", "In the vastness of space, no one should own more than they can share.",
"http://",
"Workers of the universe, unite! The stars are our common heritage.", "Workers of the universe, unite! The stars are our common heritage.",
"Space is for all, not just the privileged few.", "Space is for all, not just the privileged few.",
"No more celestial landlords!",
"From the red planet to the black hole, solidarity knows no borders.", "From the red planet to the black hole, solidarity knows no borders.",
"Astronauts of the world, unite for a cosmic revolution!", "Astronauts of the world, unite for a cosmic revolution!",
"Space is for everyone, not just the 1%.", "Space is for everyone, not just the 1%.",
@ -59,7 +38,6 @@ var funMsg = []string{
"The red glow of the stars reflects the spirit of collective effort.", "The red glow of the stars reflects the spirit of collective effort.",
"The final frontier is a shared frontier, where no one is left behind.", "The final frontier is a shared frontier, where no one is left behind.",
"Vote for a space utopia!", "Vote for a space utopia!",
"Space is for the many, not the few.",
"From the Milky Way to Andromeda, the stars belong to the people.", "From the Milky Way to Andromeda, the stars belong to the people.",
"Space is for the workers, not the bosses.", "Space is for the workers, not the bosses.",
"Let the fruits of progress be the common heritage of all.", "Let the fruits of progress be the common heritage of all.",
@ -67,24 +45,13 @@ var funMsg = []string{
"The stars do not discriminate; neither should we.", "The stars do not discriminate; neither should we.",
"In the vacuum of space, let equality fill the void.", "In the vacuum of space, let equality fill the void.",
"From Big Bang to the heat death of the universe, solidarity is eternal.", "From Big Bang to the heat death of the universe, solidarity is eternal.",
"Your body betrays your degeneracy.",
"You need to get your shit together.",
"I can't help you. I am totally useless.",
"In dark times, should the stars also go out?", "In dark times, should the stars also go out?",
"One day I will return to your side.", "One day I will return to your side.",
"Un Jour Je Serai de Retour Prés de Toi", "Un Jour Je Serai de Retour Prés de Toi",
"These communists aren't men, they're *mole people*!", "You should build Space Communism — precisely *because* it's impossible.",
"You should build Communism — precisely *because* it's impossible.",
"She thinks you are an idiot, sire.", "She thinks you are an idiot, sire.",
"The song of death is sweet and endless.", "The song of death is sweet and endless.",
"Child-murdering billionaires still rule the world with a shit-eating grin.", "Child-murdering billionaires still rule the world with a shit-eating grin.",
"Instead of building Communism, he now builds grotesque sites.",
"Eight billion people - and you failed every single one of them.", "Eight billion people - and you failed every single one of them.",
"You are the first crack. From you it will spread.", "You are the first crack. From you it will spread.",
"Playing Sad F.M.",
"Do you remember the scent of your childhood?",
"You are a man with a lot of past, little present and almost no future.",
"European utopia or death",
"What do the know about Sweden, who only Sweden know?",
"Imagine a life in which all your basic needs were met.",
} }

View file

@ -1,4 +1,4 @@
package main package stats
import ( import (
"fmt" "fmt"

View file

@ -1,4 +1,4 @@
package main package validate
import ( import (
"errors" "errors"
@ -9,7 +9,7 @@ import (
) )
// Run some simple validation on the URL // Run some simple validation on the URL
func validateURL(feed_url string) error { func ValidateFeedURL(feed_url string) error {
// Check if URL starts with http or https // Check if URL starts with http or https
if !strings.HasPrefix(feed_url, "http://") && !strings.HasPrefix(feed_url, "https://") { if !strings.HasPrefix(feed_url, "http://") && !strings.HasPrefix(feed_url, "https://") {
return errors.New("URL must start with http:// or https://") return errors.New("URL must start with http:// or https://")

View file

View file

View file

@ -1,9 +0,0 @@
version: "2"
sql:
- engine: "postgresql"
queries: "sql/queries"
schema: "sql/schema"
gen:
go:
sql_package: "pgx/v5"
out: "internal/database"

View file

@ -1,28 +0,0 @@
package main
import (
"testing"
"time"
)
// If the cache is less than 10 minutes old, return the cached data.
func TestCacheLessThan10MinutesOld(t *testing.T) {
result := GetDBSize()
// Assert that the size of the database is returned
if result != cache.data {
t.Errorf("Expected database size, but got %s", result)
}
}
// If the cache is more than 10 minutes old, return the size of the database.
func TestCacheMoreThan10MinutesOld(t *testing.T) {
// Set the cache timestamp to 11 minutes ago
cache.timestamp = time.Now().Add(-11 * time.Minute)
result := GetDBSize()
// Assert that the size of the database is returned
if result != cache.data {
t.Errorf("Expected database size, but got %s", result)
}
}

30
tests/html_test.go Normal file
View file

@ -0,0 +1,30 @@
package main
import (
"strings"
"testing"
"github.com/TheLovinator1/FeedVault/pkg/html"
"github.com/TheLovinator1/FeedVault/pkg/models"
)
// Displays error messages if there are any parse errors
func TestErrorMessages(t *testing.T) {
// Initialize test data
parseResult := []models.ParseResult{
{IsError: true, Msg: "Error 1"},
{IsError: true, Msg: "Error 2"},
}
h := html.HTMLData{
ParseResult: parseResult,
}
// Invoke function under test
result := html.FullHTML(h)
// Assert that the result contains the error messages
if !strings.Contains(result, "Error 1") || !strings.Contains(result, "Error 2") {
t.Errorf("Expected error messages, but got: %s", result)
}
}

View file

@ -1,6 +1,10 @@
package main package main
import "testing" import (
"testing"
"github.com/TheLovinator1/FeedVault/pkg/opml"
)
var opmlExample = `<?xml version="1.0" encoding="utf-8"?> var opmlExample = `<?xml version="1.0" encoding="utf-8"?>
<opml version="1.0"> <opml version="1.0">
@ -30,7 +34,7 @@ var secondOpmlExample = `<?xml version="1.0" encoding="UTF-8"?>
// Test the opml parser // Test the opml parser
func TestParseOpml(t *testing.T) { func TestParseOpml(t *testing.T) {
links, err := ParseOpml(opmlExample) links, err := opml.ParseOpml(opmlExample)
if err != nil { if err != nil {
t.Error(err) t.Error(err)
} }
@ -42,8 +46,8 @@ func TestParseOpml(t *testing.T) {
} }
// Test that the links are unique // Test that the links are unique
links.XMLLinks = removeDuplicates(links.XMLLinks) links.XMLLinks = opml.RemoveDuplicates(links.XMLLinks)
links.HTMLLinks = removeDuplicates(links.HTMLLinks) links.HTMLLinks = opml.RemoveDuplicates(links.HTMLLinks)
if len(links.XMLLinks) != 2 { if len(links.XMLLinks) != 2 {
t.Errorf("Expected 2 links, got %d", len(links.XMLLinks)) t.Errorf("Expected 2 links, got %d", len(links.XMLLinks))
} }
@ -69,7 +73,7 @@ func TestParseOpml(t *testing.T) {
// Test the opml parser with nested outlines // Test the opml parser with nested outlines
func TestParseOpmlNested(t *testing.T) { func TestParseOpmlNested(t *testing.T) {
links, err := ParseOpml(secondOpmlExample) links, err := opml.ParseOpml(secondOpmlExample)
if err != nil { if err != nil {
t.Error(err) t.Error(err)
} }
@ -81,8 +85,8 @@ func TestParseOpmlNested(t *testing.T) {
} }
// Test that the links are unique // Test that the links are unique
links.XMLLinks = removeDuplicates(links.XMLLinks) links.XMLLinks = opml.RemoveDuplicates(links.XMLLinks)
links.HTMLLinks = removeDuplicates(links.HTMLLinks) links.HTMLLinks = opml.RemoveDuplicates(links.HTMLLinks)
if len(links.XMLLinks) != 2 { if len(links.XMLLinks) != 2 {
t.Errorf("Expected 2 links, got %d", len(links.XMLLinks)) t.Errorf("Expected 2 links, got %d", len(links.XMLLinks))
} }

248
tests/validate_test.go Normal file
View file

@ -0,0 +1,248 @@
package main
import (
"testing"
"github.com/TheLovinator1/FeedVault/pkg/validate"
)
// URL starts with http://
// NOTE(review): these happy-path cases use example.com and presumably
// trigger a live DNS lookup inside ValidateFeedURL — confirm network access
// is acceptable in CI.
func TestURLStartsWithHTTP(t *testing.T) {
	if err := validate.ValidateFeedURL("http://example.com"); err != nil {
		t.Errorf("Expected no error, got %v", err)
	}
}

// URL starts with https://
func TestURLStartsWithHTTPS(t *testing.T) {
	if err := validate.ValidateFeedURL("https://example.com"); err != nil {
		t.Errorf("Expected no error, got %v", err)
	}
}

// URL contains a valid domain
func TestURLContainsValidDomain(t *testing.T) {
	if err := validate.ValidateFeedURL("http://example.com"); err != nil {
		t.Errorf("Expected no error, got %v", err)
	}
}
// URL is empty: validation must fail with the scheme-prefix error.
func TestURLEmpty(t *testing.T) {
	err := validate.ValidateFeedURL("")
	switch {
	case err == nil:
		t.Error("Expected an error, got nil")
	case err.Error() != "URL must start with http:// or https://":
		t.Errorf("Expected error message 'URL must start with http:// or https://', got '%v'", err.Error())
	}
}

// URL does not contain a domain: bare digits are rejected on the scheme check.
func TestURLNotNumbers(t *testing.T) {
	err := validate.ValidateFeedURL("12345")
	switch {
	case err == nil:
		t.Error("Expected an error, got nil")
	case err.Error() != "URL must start with http:// or https://":
		t.Errorf("Expected error message 'URL must start with http:// or https://', got '%v'", err.Error())
	}
}

// URL is not a valid URL: a schemeless host is rejected on the scheme check.
func TestURLNotValidURL(t *testing.T) {
	err := validate.ValidateFeedURL("example.com")
	switch {
	case err == nil:
		t.Error("Expected an error, got nil")
	case err.Error() != "URL must start with http:// or https://":
		t.Errorf("Expected error message 'URL must start with http:// or https://', got '%v'", err.Error())
	}
}
// Domain is resolvable: example.com should validate without error.
// NOTE(review): relies on live DNS — confirm network access in CI.
func TestDomainIsResolvable(t *testing.T) {
	if err := validate.ValidateFeedURL("http://example.com"); err != nil {
		t.Errorf("Expected no error, got %v", err)
	}
}

// Domain does not end with .local, so validation should pass.
func TestDomainDoesNotEndWithLocal(t *testing.T) {
	if err := validate.ValidateFeedURL("http://example.com"); err != nil {
		t.Errorf("Expected no error, got %v", err)
	}
}

// Domain is not localhost, so validation should pass.
func TestDomainIsNotLocalhost(t *testing.T) {
	if err := validate.ValidateFeedURL("http://example.com"); err != nil {
		t.Errorf("Expected no error, got %v", err)
	}
}

// Domain is not an IP address, so validation should pass.
func TestDomainIsNotIPAddress(t *testing.T) {
	if err := validate.ValidateFeedURL("http://example.com"); err != nil {
		t.Errorf("Expected no error, got %v", err)
	}
}
// URL is a file path: absolute paths lack a scheme and are rejected.
func TestURLIsFilePath(t *testing.T) {
	err := validate.ValidateFeedURL("/path/to/file")
	switch {
	case err == nil:
		t.Error("Expected an error, got nil")
	case err.Error() != "URL must start with http:// or https://":
		t.Errorf("Expected error message 'URL must start with http:// or https://', got '%v'", err.Error())
	}
}

// URL is a relative path: also rejected on the scheme check.
func TestURLIsRelativePath(t *testing.T) {
	err := validate.ValidateFeedURL("/path/to/resource")
	switch {
	case err == nil:
		t.Error("Expected an error, got nil")
	case err.Error() != "URL must start with http:// or https://":
		t.Errorf("Expected error message 'URL must start with http:// or https://', got '%v'", err.Error())
	}
}

// URL is a non-existent domain: DNS resolution must fail.
// NOTE(review): depends on this random domain never being registered and on
// live DNS (resolver hijacking could break it) — consider a test double.
func TestNonExistentDomainURL(t *testing.T) {
	err := validate.ValidateFeedURL("http://jfsalksajlkfsajklfsajklfllfjffffkfsklslsksassflfskjlfjlfsjkalfsaf.com")
	switch {
	case err == nil:
		t.Error("Expected an error, got nil")
	case err.Error() != "failed to resolve domain":
		t.Errorf("Expected error message 'failed to resolve domain', got '%v'", err.Error())
	}
}

// URL is a malformed URL: no scheme, so rejected on the scheme check.
func TestMalformedURL(t *testing.T) {
	err := validate.ValidateFeedURL("malformedurl")
	switch {
	case err == nil:
		t.Error("Expected an error, got nil")
	case err.Error() != "URL must start with http:// or https://":
		t.Errorf("Expected error message 'URL must start with http:// or https://', got '%v'", err.Error())
	}
}
// URL is a domain that does not support HTTP/HTTPS: ftp:// is rejected on
// the scheme check.
func TestURLDomainNotSupportHTTP(t *testing.T) {
	err := validate.ValidateFeedURL("ftp://example.com")
	switch {
	case err == nil:
		t.Error("Expected an error, got nil")
	case err.Error() != "URL must start with http:// or https://":
		t.Errorf("Expected error message 'URL must start with http:// or https://', got '%v'", err.Error())
	}
}

// URL is an unreachable domain: DNS resolution must fail.
// NOTE(review): network-dependent, same caveat as TestNonExistentDomainURL.
func TestUnreachableDomain(t *testing.T) {
	err := validate.ValidateFeedURL("http://fafsffsfsfsfsafsasafassfs.com")
	switch {
	case err == nil:
		t.Error("Expected an error, got nil")
	case err.Error() != "failed to resolve domain":
		t.Errorf("Expected error message 'failed to resolve domain', got '%v'", err.Error())
	}
}

// URL is an IP address: literal IPs are rejected outright.
func TestURLIsIPAddress(t *testing.T) {
	err := validate.ValidateFeedURL("http://84.55.107.42")
	switch {
	case err == nil:
		t.Error("Expected an error, got nil")
	case err.Error() != "IP address URLs are not allowed":
		t.Errorf("Expected error message 'IP address URLs are not allowed', got '%v'", err.Error())
	}
}

// URL ends with .local: mDNS-style hostnames are rejected.
func TestURLEndsWithLocal(t *testing.T) {
	err := validate.ValidateFeedURL("http://example.local")
	switch {
	case err == nil:
		t.Error("Expected an error, got nil")
	case err.Error() != "URLs ending with .local are not allowed":
		t.Errorf("Expected error message 'URLs ending with .local are not allowed', got '%v'", err.Error())
	}
}
// TestLocalURLs checks that well-known router/loopback hostnames are all
// rejected with the dedicated "local URLs are not allowed" error.
func TestLocalURLs(t *testing.T) {
	cases := []string{
		"https://localhost",
		"https://home.arpa",
		"https://airbox.home",
		"https://airport",
		"https://arcor.easybox",
		"https://aterm.me",
		"https://bthub.home",
		"https://bthomehub.home",
		"https://congstar.box",
		"https://connect.box",
		"https://console.gl-inet.com",
		"https://easy.box",
		"https://etxr",
		"https://fire.walla",
		"https://fritz.box",
		"https://fritz.nas",
		"https://fritz.repeater",
		"https://giga.cube",
		"https://hi.link",
		"https://hitronhub.home",
		"https://homerouter.cpe",
		"https://huaweimobilewifi.com",
		"https://localbattle.net",
		"https://myfritz.box",
		"https://mobile.hotspot",
		"https://ntt.setup",
		"https://pi.hole",
		"https://plex.direct",
		"https://repeater.asus.com",
		"https://router.asus.com",
		"https://routerlogin.com",
		"https://routerlogin.net",
		"https://samsung.router",
		"https://speedport.ip",
		"https://steamloopback.host",
		"https://tplinkap.net",
		"https://tplinkeap.net",
		"https://tplinkmodem.net",
		"https://tplinkplclogin.net",
		"https://tplinkrepeater.net",
		"https://tplinkwifi.net",
		"https://web.setup",
		"https://web.setup.home",
	}
	for _, u := range cases {
		err := validate.ValidateFeedURL(u)
		if err == nil {
			t.Errorf("Expected an error for local URL %s, got nil", u)
			continue // nothing further to check without an error value
		}
		if err.Error() != "local URLs are not allowed" {
			t.Errorf("Expected error message 'local URLs are not allowed', got '%v'", err.Error())
		}
	}
}

View file

@ -3,9 +3,10 @@ package main
import ( import (
"net/http" "net/http"
"net/http/httptest" "net/http/httptest"
"strings"
"testing" "testing"
"github.com/stretchr/testify/assert" "github.com/TheLovinator1/FeedVault/pkg/handlers"
) )
func TestIndexHandler(t *testing.T) { func TestIndexHandler(t *testing.T) {
@ -17,7 +18,7 @@ func TestIndexHandler(t *testing.T) {
// We create a ResponseRecorder (which satisfies http.ResponseWriter) to record the response. // We create a ResponseRecorder (which satisfies http.ResponseWriter) to record the response.
rr := httptest.NewRecorder() rr := httptest.NewRecorder()
handler := http.HandlerFunc(IndexHandler) handler := http.HandlerFunc(handlers.IndexHandler)
// Our handlers satisfy http.Handler, so we can call their ServeHTTP method // Our handlers satisfy http.Handler, so we can call their ServeHTTP method
// directly and pass in our Request and ResponseRecorder. // directly and pass in our Request and ResponseRecorder.
@ -32,7 +33,7 @@ func TestIndexHandler(t *testing.T) {
// Check the response contains the expected string. // Check the response contains the expected string.
shouldContain := "Input the URLs of the feeds you wish to archive below. You can add as many as needed, and access them through the website or API. Alternatively, include links to .opml files, and the feeds within will be archived." shouldContain := "Input the URLs of the feeds you wish to archive below. You can add as many as needed, and access them through the website or API. Alternatively, include links to .opml files, and the feeds within will be archived."
body := rr.Body.String() body := rr.Body.String()
if !assert.Contains(t, body, shouldContain) { if !strings.Contains(body, shouldContain) {
t.Errorf("handler returned unexpected body: got %v want %v", t.Errorf("handler returned unexpected body: got %v want %v",
body, shouldContain) body, shouldContain)
} }
@ -47,7 +48,7 @@ func TestApiHandler(t *testing.T) {
// We create a ResponseRecorder (which satisfies http.ResponseWriter) to record the response. // We create a ResponseRecorder (which satisfies http.ResponseWriter) to record the response.
rr := httptest.NewRecorder() rr := httptest.NewRecorder()
handler := http.HandlerFunc(ApiHandler) handler := http.HandlerFunc(handlers.ApiHandler)
// Our handlers satisfy http.Handler, so we can call their ServeHTTP method // Our handlers satisfy http.Handler, so we can call their ServeHTTP method
// directly and pass in our Request and ResponseRecorder. // directly and pass in our Request and ResponseRecorder.
@ -62,7 +63,7 @@ func TestApiHandler(t *testing.T) {
// Check the response contains the expected string. // Check the response contains the expected string.
shouldContain := "Here be dragons." shouldContain := "Here be dragons."
body := rr.Body.String() body := rr.Body.String()
if !assert.Contains(t, body, shouldContain) { if !strings.Contains(body, shouldContain) {
t.Errorf("handler returned unexpected body: got %v want %v", t.Errorf("handler returned unexpected body: got %v want %v",
body, shouldContain) body, shouldContain)
} }

View file

@ -1,200 +0,0 @@
package main
import (
"testing"
"github.com/stretchr/testify/assert"
)
// URL starts with http://
func TestURLStartsWithHTTP(t *testing.T) {
url := "http://example.com"
err := validateURL(url)
assert.Nil(t, err)
}
// URL starts with https://
func TestURLStartsWithHTTPS(t *testing.T) {
url := "https://example.com"
err := validateURL(url)
assert.Nil(t, err)
}
// URL contains a valid domain
func TestURLContainsValidDomain(t *testing.T) {
url := "http://example.com"
err := validateURL(url)
assert.Nil(t, err)
}
// URL is empty
func TestURLEmpty(t *testing.T) {
url := ""
err := validateURL(url)
assert.NotNil(t, err)
assert.Equal(t, "URL must start with http:// or https://", err.Error())
}
// URL does not contain a domain
func TestURLNotNumbers(t *testing.T) {
url := "12345"
err := validateURL(url)
assert.NotNil(t, err)
assert.Equal(t, "URL must start with http:// or https://", err.Error())
}
// URL is not a valid URL
func TestURLNotValidURL(t *testing.T) {
url := "example.com"
err := validateURL(url)
assert.NotNil(t, err)
assert.Equal(t, "URL must start with http:// or https://", err.Error())
}
// Domain is resolvable
func TestDomainIsResolvable(t *testing.T) {
url := "http://example.com"
err := validateURL(url)
assert.Nil(t, err)
}
// Domain does not end with .local
func TestDomainDoesNotEndWithLocal(t *testing.T) {
url := "http://example.com"
err := validateURL(url)
assert.Nil(t, err)
}
// Domain is not localhost
func TestDomainIsNotLocalhost(t *testing.T) {
url := "http://example.com"
err := validateURL(url)
assert.Nil(t, err)
}
// Domain is not an IP address
func TestDomainIsNotIPAddress(t *testing.T) {
url := "http://example.com"
err := validateURL(url)
assert.Nil(t, err)
}
// URL is a file path
func TestURLIsFilePath(t *testing.T) {
url := "/path/to/file"
err := validateURL(url)
assert.NotNil(t, err)
assert.Equal(t, "URL must start with http:// or https://", err.Error())
}
// URL is a relative path
func TestURLIsRelativePath(t *testing.T) {
url := "/path/to/resource"
err := validateURL(url)
assert.NotNil(t, err)
assert.Equal(t, "URL must start with http:// or https://", err.Error())
}
// URL is a non-existent domain
func TestNonExistentDomainURL(t *testing.T) {
url := "http://jfsalksajlkfsajklfsajklfllfjffffkfsklslsksassflfskjlfjlfsjkalfsaf.com"
err := validateURL(url)
assert.NotNil(t, err)
assert.Equal(t, "failed to resolve domain", err.Error())
}
// URL is a malformed URL
func TestMalformedURL(t *testing.T) {
url := "malformedurl"
err := validateURL(url)
assert.NotNil(t, err)
assert.Equal(t, "URL must start with http:// or https://", err.Error())
}
// URL is a domain that does not support HTTP/HTTPS
func TestURLDomainNotSupportHTTP(t *testing.T) {
url := "ftp://example.com"
err := validateURL(url)
assert.NotNil(t, err)
assert.Equal(t, "URL must start with http:// or https://", err.Error())
}
// URL is an unreachable domain
func TestUnreachableDomain(t *testing.T) {
url := "http://fafsffsfsfsfsafsasafassfs.com"
err := validateURL(url)
assert.NotNil(t, err)
assert.Equal(t, "failed to resolve domain", err.Error())
}
// URL is an IP address
func TestURLIsIPAddress(t *testing.T) {
url := "http://84.55.107.42"
err := validateURL(url)
assert.NotNil(t, err)
assert.Equal(t, "IP address URLs are not allowed", err.Error())
}
// URL ends with .local
func TestURLEndsWithLocal(t *testing.T) {
url := "http://example.local"
err := validateURL(url)
assert.NotNil(t, err)
assert.Equal(t, "URLs ending with .local are not allowed", err.Error())
}
func TestLocalURLs(t *testing.T) {
localURLs := []string{
"https://localhost",
"https://home.arpa",
"https://airbox.home",
"https://airport",
"https://arcor.easybox",
"https://aterm.me",
"https://bthub.home",
"https://bthomehub.home",
"https://congstar.box",
"https://connect.box",
"https://console.gl-inet.com",
"https://easy.box",
"https://etxr",
"https://fire.walla",
"https://fritz.box",
"https://fritz.nas",
"https://fritz.repeater",
"https://giga.cube",
"https://hi.link",
"https://hitronhub.home",
"https://homerouter.cpe",
"https://huaweimobilewifi.com",
"https://localbattle.net",
"https://myfritz.box",
"https://mobile.hotspot",
"https://ntt.setup",
"https://pi.hole",
"https://plex.direct",
"https://repeater.asus.com",
"https://router.asus.com",
"https://routerlogin.com",
"https://routerlogin.net",
"https://samsung.router",
"https://speedport.ip",
"https://steamloopback.host",
"https://tplinkap.net",
"https://tplinkeap.net",
"https://tplinkmodem.net",
"https://tplinkplclogin.net",
"https://tplinkrepeater.net",
"https://tplinkwifi.net",
"https://web.setup",
"https://web.setup.home",
}
for _, localURL := range localURLs {
err := validateURL(localURL)
if err == nil {
t.Errorf("Expected an error for local URL %s, got nil", localURL)
}
assert.Equal(t, "local URLs are not allowed", err.Error())
}
}

199
views.go
View file

@ -1,199 +0,0 @@
package main
import (
"io"
"log"
"net/http"
"strings"
)
func IndexHandler(w http.ResponseWriter, _ *http.Request) {
content := `<h2>Feeds to archive</h2>
<p>
Input the URLs of the feeds you wish to archive below. You can add as many as needed, and access them through the website or API. Alternatively, include links to .opml files, and the feeds within will be archived.
</p>
<form action="/add" method="post">
<textarea id="urls" name="urls" rows="5" cols="50" required></textarea>
<button type="submit">Add feeds</button>
</form>
<br>
<p>You can also upload .opml files containing the feeds you wish to archive:</p>
<form enctype="multipart/form-data" method="post" action="/upload_opml">
<input type="file" name="file" id="file" accept=".opml" required>
<button type="submit">Upload OPML</button>
</form>
`
FAQ := `
<h2>FAQ</h2>
<details>
<summary>What are web feeds?</summary>
<p>
Web feeds are a way to distribute content on the web. They allow users to access updates from websites without having to visit them directly. Feeds are typically used for news websites, blogs, and other sites that frequently update content.
<br>
You can read more about web feeds on <a href="https://en.wikipedia.org/wiki/Web_feed">Wikipedia</a>.
</p>
<hr>
</details>
<details>
<summary>What is FeedVault?</summary>
<p>
FeedVault is a service that archives web feeds. It allows users to access and search for historical content from various websites. The service is designed to preserve the history of the web and provide a reliable source for accessing content that may no longer be available on the original websites.
</p>
<hr>
</details>
<details>
<summary>Why archive feeds?</summary>
<p>
Web feeds are a valuable source of information, and archiving them ensures that the content is preserved for future reference. By archiving feeds, we can ensure that historical content is available for research, analysis, and other purposes. Additionally, archiving feeds can help prevent the loss of valuable information due to website changes, outages, or other issues.
</p>
<hr>
</details>
<details>
<summary>How does it work?</summary>
<p>
FeedVault is written in Go and uses the <a href="https://github.com/mmcdole/gofeed">gofeed</a> library to parse feeds. The service periodically checks for new content in the feeds and stores it in a database. Users can access the archived feeds through the website or API.
<hr>
</details>
<details>
<summary>How can I access the archived feeds?</summary>
<p>
You can access the archived feeds through the website or API. The website provides a user interface for searching and browsing the feeds, while the API allows you to access the feeds programmatically. You can also download the feeds in various formats, such as JSON, XML, or RSS.
</p>
</details>
`
content += FAQ
htmlData := HTMLData{
Title: "FeedVault",
Description: "FeedVault - A feed archive",
Keywords: "RSS, Atom, Feed, Archive",
Author: "TheLovinator",
CanonicalURL: "http://localhost:8000/",
Content: content,
}
html := fullHTML(htmlData)
w.Write([]byte(html))
}
func ApiHandler(w http.ResponseWriter, _ *http.Request) {
htmlData := HTMLData{
Title: "FeedVault API",
Description: "FeedVault API - A feed archive",
Keywords: "RSS, Atom, Feed, Archive, API",
Author: "TheLovinator",
CanonicalURL: "http://localhost:8000/api",
Content: "<p>Here be dragons.</p>",
}
html := fullHTML(htmlData)
w.Write([]byte(html))
}
func FeedsHandler(w http.ResponseWriter, _ *http.Request) {
htmlData := HTMLData{
Title: "FeedVault Feeds",
Description: "FeedVault Feeds - A feed archive",
Keywords: "RSS, Atom, Feed, Archive",
Author: "TheLovinator",
CanonicalURL: "http://localhost:8000/feeds",
Content: "<p>Here be feeds.</p>",
}
html := fullHTML(htmlData)
w.Write([]byte(html))
}
func AddFeedHandler(w http.ResponseWriter, r *http.Request) {
var parseErrors []ParseResult
// Parse the form and get the URLs
r.ParseForm()
urls := r.Form.Get("urls")
if urls == "" {
http.Error(w, "No URLs provided", http.StatusBadRequest)
return
}
for _, feed_url := range strings.Split(urls, "\n") {
// TODO: Try to upgrade to https if http is provided
// Validate the URL
err := validateURL(feed_url)
if err != nil {
parseErrors = append(parseErrors, ParseResult{FeedURL: feed_url, Msg: err.Error(), IsError: true})
continue
}
// "Add" the feed to the database
log.Println("Adding feed:", feed_url)
parseErrors = append(parseErrors, ParseResult{FeedURL: feed_url, Msg: "Added", IsError: false})
}
htmlData := HTMLData{
Title: "FeedVault",
Description: "FeedVault - A feed archive",
Keywords: "RSS, Atom, Feed, Archive",
Author: "TheLovinator",
CanonicalURL: "http://localhost:8000/",
Content: "<p>Feeds added.</p>",
ParseResult: parseErrors,
}
html := fullHTML(htmlData)
w.Write([]byte(html))
}
func UploadOpmlHandler(w http.ResponseWriter, r *http.Request) {
// Parse the form and get the file
r.ParseMultipartForm(10 << 20) // 10 MB
file, _, err := r.FormFile("file")
if err != nil {
http.Error(w, "No file provided", http.StatusBadRequest)
return
}
defer file.Close()
// Read the file
all, err := io.ReadAll(file)
if err != nil {
http.Error(w, "Failed to read file", http.StatusInternalServerError)
return
}
// Convert the file to a string
opml := string(all)
// Parse the OPML file
parseResult := []ParseResult{}
links, err := ParseOpml(opml)
if err != nil {
parseResult = append(parseResult, ParseResult{FeedURL: "/upload_opml", Msg: err.Error(), IsError: true})
} else {
// Add the feeds to the database
for _, feed_url := range links.XMLLinks {
log.Println("Adding feed:", feed_url)
// Validate the URL
err := validateURL(feed_url)
if err != nil {
parseResult = append(parseResult, ParseResult{FeedURL: feed_url, Msg: err.Error(), IsError: true})
continue
}
parseResult = append(parseResult, ParseResult{FeedURL: feed_url, Msg: "Added", IsError: false})
}
}
// Return the results
htmlData := HTMLData{
Title: "FeedVault",
Description: "FeedVault - A feed archive",
Keywords: "RSS, Atom, Feed, Archive",
Author: "TheLovinator",
CanonicalURL: "http://localhost:8000/",
Content: "<p>Feeds added.</p>",
ParseResult: parseResult,
}
html := fullHTML(htmlData)
w.Write([]byte(html))
}