From c7cca02ca7de9cce49bde925040d8a53d48f8c59 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Joakim=20Hells=C3=A9n?= Date: Tue, 13 Feb 2024 21:21:08 +0100 Subject: [PATCH] Refactor folder structure --- SECURITY.md => .github/SECURITY.md | 0 .vscode/settings.json | 7 +- CONTRIBUTING.md | 43 ---- Docker.md | 31 --- cmd/feedvault/main.go | 36 ++++ go.mod | 17 -- go.sum | 27 --- html_test.go | 53 ----- main.go | 55 ------ models.go | 180 ----------------- pkg/handlers/api.go | 20 ++ pkg/handlers/feeds.go | 64 ++++++ pkg/handlers/index.go | 79 ++++++++ pkg/handlers/opml.go | 63 ++++++ html.go => pkg/html/html.go | 40 +--- pkg/models/models.go | 32 +++ opml.go => pkg/opml/opml.go | 8 +- quotes.go => pkg/quotes/quotes.go | 39 +--- stats.go => pkg/stats/stats.go | 2 +- validate.go => pkg/validate/validate.go | 4 +- sql/queries/.gitkeep | 0 sql/schema/.gitkeep | 0 sqlc.yaml | 9 - stats_test.go | 28 --- tests/html_test.go | 30 +++ opml_test.go => tests/opml_test.go | 18 +- tests/validate_test.go | 248 ++++++++++++++++++++++++ views_test.go => tests/views_test.go | 11 +- validate_test.go | 200 ------------------- views.go | 199 ------------------- 30 files changed, 610 insertions(+), 933 deletions(-) rename SECURITY.md => .github/SECURITY.md (100%) delete mode 100644 CONTRIBUTING.md delete mode 100644 Docker.md create mode 100644 cmd/feedvault/main.go delete mode 100644 html_test.go delete mode 100644 main.go delete mode 100644 models.go create mode 100644 pkg/handlers/api.go create mode 100644 pkg/handlers/feeds.go create mode 100644 pkg/handlers/index.go create mode 100644 pkg/handlers/opml.go rename html.go => pkg/html/html.go (84%) create mode 100644 pkg/models/models.go rename opml.go => pkg/opml/opml.go (92%) rename quotes.go => pkg/quotes/quotes.go (67%) rename stats.go => pkg/stats/stats.go (98%) rename validate.go => pkg/validate/validate.go (97%) delete mode 100644 sql/queries/.gitkeep delete mode 100644 sql/schema/.gitkeep delete mode 100644 sqlc.yaml delete mode 100644 
stats_test.go create mode 100644 tests/html_test.go rename opml_test.go => tests/opml_test.go (89%) create mode 100644 tests/validate_test.go rename views_test.go => tests/views_test.go (88%) delete mode 100644 validate_test.go delete mode 100644 views.go diff --git a/SECURITY.md b/.github/SECURITY.md similarity index 100% rename from SECURITY.md rename to .github/SECURITY.md diff --git a/.vscode/settings.json b/.vscode/settings.json index 0c0545a..76cb6be 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -79,10 +79,5 @@ "webmail", "XOXO", "zerolog" - ], - "terminal.integrated.env.windows": { - "GOOSE_DRIVER": "postgres", - "GOOSE_DBSTRING": "user=feedvault password=feedvault dbname=feedvault sslmode=disable", - "GOOSE_MIGRATION_DIR": "${workspaceFolder}/sql/schema" - } + ] } diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md deleted file mode 100644 index 0f81233..0000000 --- a/CONTRIBUTING.md +++ /dev/null @@ -1,43 +0,0 @@ -# Contributing to FeedVault - -Feel free to create a pull request for things like bug fixes, new features, and improvements. Your pull request doesn't have to be perfect, it just needs to work (or show what you're thinking). I can help you with the rest if needed. If you're not sure about something, feel free to open an issue first to discuss it. - -Please don't add any dependencies unless it's absolutely necessary. I want to try to keep the project using the standard library as much as possible. - -We use GitHub issues for tracking requests and bugs, so feel free to open an issue if you have any questions or need help. - -Thank you for your contributions! - -## Running the project - -You can run the project using the following command: - -```bash -go run cmd/feedvault/main.go -``` - -You can also run the tests using: - -```bash -go test ./... -``` - -## Using Docker - -We have a [Docker.md](Docker.md) file with instructions on how to run the project using Docker. 
- -## Using sqlc and goose - -I use [sqlc](https://docs.sqlc.dev/en/latest/index.html) for generating type safe Go from SQL. Make sure to regenerate the code after changing any SQL queries: - -```bash -sqlc generate -``` - -[goose](https://pressly.github.io/goose/) is used for managing database migrations. To create a new migration, run: - -```bash -goose create add_some_column sql -goose status -goose up -``` diff --git a/Docker.md b/Docker.md deleted file mode 100644 index da89b51..0000000 --- a/Docker.md +++ /dev/null @@ -1,31 +0,0 @@ -# Docker Compose - -You can run the project using Docker Compose. You can use the following commands to build, run, and stop the project: - -```bash -docker compose build -docker compose up -docker compose down -``` - -## Accessing the database - -```bash -docker-compose exec db psql -U feedvault -d feedvault -``` - -## Environment variables - -You can use the following environment variables to configure the project: - -- `PORT`: The port to listen on (default: `8000`) -- `DATABASE_URL`: The URL of the database (default: `postgres://feedvault:feedvault@db/feedvault?sslmode=disable`) - - FeedVault only supports PostgreSQL at the moment -- `ADMIN_EMAIL`: The email where we should email errors to. -- `EMAIL_HOST_USER`: The email address to send emails from. -- `EMAIL_HOST_PASSWORD`: The password for the email address to send emails from. -- `EMAIL_HOST`: The SMTP server to send emails through. (default: `smtp.gmail.com`) -- `EMAIL_PORT`: The port to send emails through. (default: `587`) -- `DISCORD_WEBHOOK_URL`: The Discord webhook URL to send messages to. -- `APP_ENV`: The environment the app is running in. Development or Production. (default: `development`) -- `USER_AGENT`: The user agent to use for making requests. 
(default: `None`) diff --git a/cmd/feedvault/main.go b/cmd/feedvault/main.go new file mode 100644 index 0000000..674499b --- /dev/null +++ b/cmd/feedvault/main.go @@ -0,0 +1,36 @@ +package main + +import ( + "log" + + "net/http" + + "github.com/TheLovinator1/FeedVault/pkg/handlers" +) + +func init() { log.SetFlags(log.LstdFlags | log.Lshortfile) } + +func main() { + log.Print("Starting server") + + // Create a new ServeMux + mux := http.NewServeMux() + + // Routes + mux.HandleFunc("/", handlers.IndexHandler) + mux.HandleFunc("/api", handlers.ApiHandler) + mux.HandleFunc("/feeds", handlers.FeedsHandler) + mux.HandleFunc("/add", handlers.AddFeedHandler) + mux.HandleFunc("/upload_opml", handlers.UploadOpmlHandler) + + // Create server + server := &http.Server{ + Addr: "127.0.0.1:8000", + Handler: mux, + } + + log.Print("Server started on http://localhost:8000/ to stop") + if err := server.ListenAndServe(); err != nil && err != http.ErrServerClosed { + log.Fatalf("ListenAndServe(): %v", err) + } +} diff --git a/go.mod b/go.mod index dd3a996..73d5a4b 100644 --- a/go.mod +++ b/go.mod @@ -1,20 +1,3 @@ module github.com/TheLovinator1/FeedVault go 1.22.0 - -require ( - github.com/stretchr/testify v1.8.4 - github.com/tdewolff/minify/v2 v2.20.16 - gorm.io/driver/sqlite v1.5.5 - gorm.io/gorm v1.25.7 -) - -require ( - github.com/davecgh/go-spew v1.1.1 // indirect - github.com/jinzhu/inflection v1.0.0 // indirect - github.com/jinzhu/now v1.1.5 // indirect - github.com/mattn/go-sqlite3 v1.14.22 // indirect - github.com/pmezard/go-difflib v1.0.0 // indirect - github.com/tdewolff/parse/v2 v2.7.11 // indirect - gopkg.in/yaml.v3 v3.0.1 // indirect -) diff --git a/go.sum b/go.sum index 2db0ad1..e69de29 100644 --- a/go.sum +++ b/go.sum @@ -1,27 +0,0 @@ -github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= -github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/jinzhu/inflection v1.0.0 
h1:K317FqzuhWc8YvSVlFMCCUb36O/S9MCKRDI7QkRKD/E= -github.com/jinzhu/inflection v1.0.0/go.mod h1:h+uFLlag+Qp1Va5pdKtLDYj+kHp5pxUVkryuEj+Srlc= -github.com/jinzhu/now v1.1.5 h1:/o9tlHleP7gOFmsnYNz3RGnqzefHA47wQpKrrdTIwXQ= -github.com/jinzhu/now v1.1.5/go.mod h1:d3SSVoowX0Lcu0IBviAWJpolVfI5UJVZZ7cO71lE/z8= -github.com/mattn/go-sqlite3 v1.14.22 h1:2gZY6PC6kBnID23Tichd1K+Z0oS6nE/XwU+Vz/5o4kU= -github.com/mattn/go-sqlite3 v1.14.22/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y= -github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= -github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= -github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk= -github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= -github.com/tdewolff/minify/v2 v2.20.16 h1:/C8dtRkxLTIyUlKlBz46gDiktCrE8a6+c1gTrnPFz+U= -github.com/tdewolff/minify/v2 v2.20.16/go.mod h1:/FvxV9KaTrFu35J9I2FhRvWSBxcHj8sDSdwBFh5voxM= -github.com/tdewolff/parse/v2 v2.7.11 h1:v+W45LnzmjndVlfqPCT5gGjAAZKd1GJGOPJveTIkBY8= -github.com/tdewolff/parse/v2 v2.7.11/go.mod h1:3FbJWZp3XT9OWVN3Hmfp0p/a08v4h8J9W1aghka0soA= -github.com/tdewolff/test v1.0.11-0.20231101010635-f1265d231d52/go.mod h1:6DAvZliBAAnD7rhVgwaM7DE5/d9NMOAJ09SqYqeK4QE= -github.com/tdewolff/test v1.0.11-0.20240106005702-7de5f7df4739 h1:IkjBCtQOOjIn03u/dMQK9g+Iw9ewps4mCl1nB8Sscbo= -github.com/tdewolff/test v1.0.11-0.20240106005702-7de5f7df4739/go.mod h1:XPuWBzvdUzhCuxWO1ojpXsyzsA5bFoS3tO/Q3kFuTG8= -gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= -gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= -gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= -gorm.io/driver/sqlite v1.5.5 h1:7MDMtUZhV065SilG62E0MquljeArQZNfJnjd9i9gx3E= 
-gorm.io/driver/sqlite v1.5.5/go.mod h1:6NgQ7sQWAIFsPrJJl1lSNSu2TABh0ZZ/zm5fosATavE= -gorm.io/gorm v1.25.7 h1:VsD6acwRjz2zFxGO50gPO6AkNs7KKnvfzUjHQhZDz/A= -gorm.io/gorm v1.25.7/go.mod h1:hbnx/Oo0ChWMn1BIhpy1oYozzpM15i4YPuHDmfYtwg8= diff --git a/html_test.go b/html_test.go deleted file mode 100644 index 93a4293..0000000 --- a/html_test.go +++ /dev/null @@ -1,53 +0,0 @@ -package main - -import ( - "strings" - "testing" -) - -// returns a minified version of the input HTML string -func TestMinifyHTML(t *testing.T) { - input := "Test

Hello, World!

" - expected := "Test

Hello, World!

" - - result := minifyHTML(input) - - if result != expected { - t.Errorf("Expected minified HTML: %s, but got: %s", expected, result) - } -} - -func TestMinifyCSS(t *testing.T) { - cssString := ` - body { - background-color: red; - color: blue; - } - ` - expected := "body{background-color:red;color:blue}" - result := minifyCSS(cssString) - if result != expected { - t.Errorf("Expected minified CSS string to be %s, but got %s", expected, result) - } -} - -// Displays error messages if there are any parse errors -func TestErrorMessages(t *testing.T) { - // Initialize test data - parseResult := []ParseResult{ - {IsError: true, Msg: "Error 1"}, - {IsError: true, Msg: "Error 2"}, - } - - h := HTMLData{ - ParseResult: parseResult, - } - - // Invoke function under test - result := fullHTML(h) - - // Assert that the result contains the error messages - if !strings.Contains(result, "Error 1") || !strings.Contains(result, "Error 2") { - t.Errorf("Expected error messages, but got: %s", result) - } -} diff --git a/main.go b/main.go deleted file mode 100644 index 4c55daf..0000000 --- a/main.go +++ /dev/null @@ -1,55 +0,0 @@ -package main - -import ( - "log" - "net/http" - - "gorm.io/driver/sqlite" - "gorm.io/gorm" -) - -var db *gorm.DB - -// Initialize the database -func init() { - var err error - db, err = gorm.Open(sqlite.Open("feedvault.db"), &gorm.Config{}) - if err != nil { - panic("Failed to connect to database") - } - if db == nil { - panic("db nil") - } - log.Println("Connected to database") - - // Migrate the schema - err = db.AutoMigrate(&Feed{}, &Item{}, &Person{}, &Image{}, &Enclosure{}, &DublinCoreExtension{}, &ITunesFeedExtension{}, &ITunesItemExtension{}, &ITunesCategory{}, &ITunesOwner{}, &Extension{}) - if err != nil { - panic("Failed to migrate the database") - } -} - -func main() { - log.Println("Starting FeedVault...") - - // Create a new ServeMux - mux := http.NewServeMux() - - // Routes - mux.HandleFunc("/", IndexHandler) - mux.HandleFunc("/api", 
ApiHandler) - mux.HandleFunc("/feeds", FeedsHandler) - mux.HandleFunc("/add", AddFeedHandler) - mux.HandleFunc("/upload_opml", UploadOpmlHandler) - - // Create server - server := &http.Server{ - Addr: "127.0.0.1:8000", - Handler: mux, - } - - log.Println("Listening on http://localhost:8000/ to stop") - if err := server.ListenAndServe(); err != nil && err != http.ErrServerClosed { - log.Fatalf("Server error: %v", err) - } -} diff --git a/models.go b/models.go deleted file mode 100644 index dc1db16..0000000 --- a/models.go +++ /dev/null @@ -1,180 +0,0 @@ -package main - -import ( - "net/http" - "time" - - "gorm.io/gorm" -) - -type Feed struct { - gorm.Model - Title string `json:"title,omitempty"` - Description string `json:"description,omitempty"` - Link string `json:"link,omitempty"` - FeedLink string `json:"feedLink,omitempty"` - Links []string `gorm:"type:text[]" json:"links,omitempty"` - Updated string `json:"updated,omitempty"` - UpdatedParsed *time.Time `json:"updatedParsed,omitempty"` - Published string `json:"published,omitempty"` - PublishedParsed *time.Time `json:"publishedParsed,omitempty"` - Authors []*Person `gorm:"many2many:feed_authors;" json:"authors,omitempty"` - Language string `json:"language,omitempty"` - Image *Image `gorm:"foreignKey:ID" json:"image,omitempty"` - Copyright string `json:"copyright,omitempty"` - Generator string `json:"generator,omitempty"` - Categories []string `gorm:"type:text[]" json:"categories,omitempty"` - DublinCoreExt *DublinCoreExtension `gorm:"foreignKey:ID" json:"dcExt,omitempty"` - ITunesExt *ITunesFeedExtension `gorm:"foreignKey:ID" json:"itunesExt,omitempty"` - Extensions Extensions `gorm:"type:json" json:"extensions,omitempty"` - Custom map[string]string `gorm:"type:json" json:"custom,omitempty"` - Items []*Item `gorm:"foreignKey:ID" json:"items,omitempty"` - FeedType string `json:"feedType"` - FeedVersion string `json:"feedVersion"` -} - -type Item struct { - gorm.Model - Title string `json:"title,omitempty"` - 
Description string `json:"description,omitempty"` - Content string `json:"content,omitempty"` - Link string `json:"link,omitempty"` - Links []string `gorm:"type:text[]" json:"links,omitempty"` - Updated string `json:"updated,omitempty"` - UpdatedParsed *time.Time `json:"updatedParsed,omitempty"` - Published string `json:"published,omitempty"` - PublishedParsed *time.Time `json:"publishedParsed,omitempty"` - Authors []*Person `gorm:"many2many:item_authors;" json:"authors,omitempty"` - GUID string `json:"guid,omitempty"` - Image *Image `gorm:"foreignKey:ID" json:"image,omitempty"` - Categories []string `gorm:"type:text[]" json:"categories,omitempty"` - Enclosures []*Enclosure `gorm:"foreignKey:ID" json:"enclosures,omitempty"` - DublinCoreExt *DublinCoreExtension `gorm:"foreignKey:ID" json:"dcExt,omitempty"` - ITunesExt *ITunesFeedExtension `gorm:"foreignKey:ID" json:"itunesExt,omitempty"` - Extensions Extensions `gorm:"type:json" json:"extensions,omitempty"` - Custom map[string]string `gorm:"type:json" json:"custom,omitempty"` -} - -type Person struct { - gorm.Model - Name string `json:"name,omitempty"` - Email string `json:"email,omitempty"` -} - -func (Person) TableName() string { - return "feed_authors" -} - -type Image struct { - gorm.Model - URL string `json:"url,omitempty"` - Title string `json:"title,omitempty"` -} - -type Enclosure struct { - gorm.Model - URL string `json:"url,omitempty"` - Length string `json:"length,omitempty"` - Type string `json:"type,omitempty"` -} - -type DublinCoreExtension struct { - gorm.Model - Title []string `gorm:"type:text[]" json:"title,omitempty"` - Creator []string `gorm:"type:text[]" json:"creator,omitempty"` - Author []string `gorm:"type:text[]" json:"author,omitempty"` - Subject []string `gorm:"type:text[]" json:"subject,omitempty"` - Description []string `gorm:"type:text[]" json:"description,omitempty"` - Publisher []string `gorm:"type:text[]" json:"publisher,omitempty"` - Contributor []string `gorm:"type:text[]" 
json:"contributor,omitempty"` - Date []string `gorm:"type:text[]" json:"date,omitempty"` - Type []string `gorm:"type:text[]" json:"type,omitempty"` - Format []string `gorm:"type:text[]" json:"format,omitempty"` - Identifier []string `gorm:"type:text[]" json:"identifier,omitempty"` - Source []string `gorm:"type:text[]" json:"source,omitempty"` - Language []string `gorm:"type:text[]" json:"language,omitempty"` - Relation []string `gorm:"type:text[]" json:"relation,omitempty"` - Coverage []string `gorm:"type:text[]" json:"coverage,omitempty"` - Rights []string `gorm:"type:text[]" json:"rights,omitempty"` -} - -type ITunesFeedExtension struct { - gorm.Model - Author string `json:"author,omitempty"` - Block string `json:"block,omitempty"` - Categories []*ITunesCategory `gorm:"many2many:feed_itunes_categories;" json:"categories,omitempty"` - Explicit string `json:"explicit,omitempty"` - Keywords string `json:"keywords,omitempty"` - Owner *ITunesOwner `gorm:"foreignKey:ID" json:"owner,omitempty"` - Subtitle string `json:"subtitle,omitempty"` - Summary string `json:"summary,omitempty"` - Image string `json:"image,omitempty"` - Complete string `json:"complete,omitempty"` - NewFeedURL string `json:"newFeedUrl,omitempty"` - Type string `json:"type,omitempty"` -} - -type ITunesItemExtension struct { - gorm.Model - Author string `json:"author,omitempty"` - Block string `json:"block,omitempty"` - Duration string `json:"duration,omitempty"` - Explicit string `json:"explicit,omitempty"` - Keywords string `json:"keywords,omitempty"` - Subtitle string `json:"subtitle,omitempty"` - Summary string `json:"summary,omitempty"` - Image string `json:"image,omitempty"` - IsClosedCaptioned string `json:"isClosedCaptioned,omitempty"` - Episode string `json:"episode,omitempty"` - Season string `json:"season,omitempty"` - Order string `json:"order,omitempty"` - EpisodeType string `json:"episodeType,omitempty"` -} - -type ITunesCategory struct { - gorm.Model - Text string `json:"text,omitempty"` 
- Subcategory *ITunesCategory `gorm:"many2many:feed_itunes_categories;" json:"subcategory,omitempty"` -} - -func (ITunesCategory) TableName() string { - return "feed_itunes_categories" -} - -type ITunesOwner struct { - gorm.Model - Email string `json:"email,omitempty"` - Name string `json:"name,omitempty"` -} - -type Extensions map[string]map[string][]Extension - -type Extension struct { - gorm.Model - Name string `json:"name"` - Value string `json:"value"` - Attrs map[string]string `gorm:"type:json" json:"attrs"` - Children map[string][]Extension `gorm:"type:json" json:"children"` -} - -type TemplateData struct { - Title string - Description string - Keywords string - Author string - CanonicalURL string - FeedCount int - DatabaseSize string - Request *http.Request - ParseErrors []ParseResult -} - -type ParseResult struct { - FeedURL string - Msg string - IsError bool -} - -func (d *TemplateData) GetDatabaseSizeAndFeedCount() { - d.DatabaseSize = GetDBSize() -} diff --git a/pkg/handlers/api.go b/pkg/handlers/api.go new file mode 100644 index 0000000..da44073 --- /dev/null +++ b/pkg/handlers/api.go @@ -0,0 +1,20 @@ +package handlers + +import ( + "net/http" + + "github.com/TheLovinator1/FeedVault/pkg/html" +) + +func ApiHandler(w http.ResponseWriter, _ *http.Request) { + htmlData := html.HTMLData{ + Title: "FeedVault API", + Description: "FeedVault API - A feed archive", + Keywords: "RSS, Atom, Feed, Archive, API", + Author: "TheLovinator", + CanonicalURL: "http://localhost:8000/api", + Content: "

Here be dragons.

", + } + html := html.FullHTML(htmlData) + w.Write([]byte(html)) +} diff --git a/pkg/handlers/feeds.go b/pkg/handlers/feeds.go new file mode 100644 index 0000000..c5ba21f --- /dev/null +++ b/pkg/handlers/feeds.go @@ -0,0 +1,64 @@ +package handlers + +import ( + "log" + "net/http" + "strings" + + "github.com/TheLovinator1/FeedVault/pkg/html" + "github.com/TheLovinator1/FeedVault/pkg/models" + "github.com/TheLovinator1/FeedVault/pkg/validate" +) + +func FeedsHandler(w http.ResponseWriter, _ *http.Request) { + htmlData := html.HTMLData{ + Title: "FeedVault Feeds", + Description: "FeedVault Feeds - A feed archive", + Keywords: "RSS, Atom, Feed, Archive", + Author: "TheLovinator", + CanonicalURL: "http://localhost:8000/feeds", + Content: "

Here be feeds.

", + } + html := html.FullHTML(htmlData) + w.Write([]byte(html)) +} + +func AddFeedHandler(w http.ResponseWriter, r *http.Request) { + var parseErrors []models.ParseResult + + // Parse the form and get the URLs + r.ParseForm() + urls := r.Form.Get("urls") + if urls == "" { + http.Error(w, "No URLs provided", http.StatusBadRequest) + return + } + + for _, feed_url := range strings.Split(urls, "\n") { + // TODO: Try to upgrade to https if http is provided + + // Validate the URL + err := validate.ValidateFeedURL(feed_url) + if err != nil { + parseErrors = append(parseErrors, models.ParseResult{FeedURL: feed_url, Msg: err.Error(), IsError: true}) + continue + } + + // "Add" the feed to the database + log.Println("Adding feed:", feed_url) + parseErrors = append(parseErrors, models.ParseResult{FeedURL: feed_url, Msg: "Added", IsError: false}) + + } + htmlData := html.HTMLData{ + Title: "FeedVault", + Description: "FeedVault - A feed archive", + Keywords: "RSS, Atom, Feed, Archive", + Author: "TheLovinator", + CanonicalURL: "http://localhost:8000/", + Content: "

Feeds added.

", + ParseResult: parseErrors, + } + + html := html.FullHTML(htmlData) + w.Write([]byte(html)) +} diff --git a/pkg/handlers/index.go b/pkg/handlers/index.go new file mode 100644 index 0000000..e1ff056 --- /dev/null +++ b/pkg/handlers/index.go @@ -0,0 +1,79 @@ +package handlers + +import ( + "net/http" + + "github.com/TheLovinator1/FeedVault/pkg/html" +) + +func IndexHandler(w http.ResponseWriter, _ *http.Request) { + + content := `

Feeds to archive

+

+ Input the URLs of the feeds you wish to archive below. You can add as many as needed, and access them through the website or API. Alternatively, include links to .opml files, and the feeds within will be archived. +

+
+ + +
+
+

You can also upload .opml files containing the feeds you wish to archive:

+
+ + +
+ ` + + FAQ := ` + +

FAQ

+
+ What are web feeds? +

+ Web feeds are a way to distribute content on the web. They allow users to access updates from websites without having to visit them directly. Feeds are typically used for news websites, blogs, and other sites that frequently update content. +
+ You can read more about web feeds on Wikipedia. +

+
+
+
+ What is FeedVault? +

+ FeedVault is a service that archives web feeds. It allows users to access and search for historical content from various websites. The service is designed to preserve the history of the web and provide a reliable source for accessing content that may no longer be available on the original websites. +

+
+
+
+ Why archive feeds? +

+ Web feeds are a valuable source of information, and archiving them ensures that the content is preserved for future reference. By archiving feeds, we can ensure that historical content is available for research, analysis, and other purposes. Additionally, archiving feeds can help prevent the loss of valuable information due to website changes, outages, or other issues. +

+
+
+
+ How does it work? +

+ FeedVault is written in Go and uses the gofeed library to parse feeds. The service periodically checks for new content in the feeds and stores it in a database. Users can access the archived feeds through the website or API. +


+
+
+ How can I access the archived feeds? +

+ You can access the archived feeds through the website or API. The website provides a user interface for searching and browsing the feeds, while the API allows you to access the feeds programmatically. You can also download the feeds in various formats, such as JSON, XML, or RSS. +

+
+ ` + + content += FAQ + + htmlData := html.HTMLData{ + Title: "FeedVault", + Description: "FeedVault - A feed archive", + Keywords: "RSS, Atom, Feed, Archive", + Author: "TheLovinator", + CanonicalURL: "http://localhost:8000/", + Content: content, + } + html := html.FullHTML(htmlData) + w.Write([]byte(html)) +} diff --git a/pkg/handlers/opml.go b/pkg/handlers/opml.go new file mode 100644 index 0000000..96395d9 --- /dev/null +++ b/pkg/handlers/opml.go @@ -0,0 +1,63 @@ +package handlers + +import ( + "io" + "log" + "net/http" + + "github.com/TheLovinator1/FeedVault/pkg/html" + "github.com/TheLovinator1/FeedVault/pkg/models" + "github.com/TheLovinator1/FeedVault/pkg/opml" + "github.com/TheLovinator1/FeedVault/pkg/validate" +) + +func UploadOpmlHandler(w http.ResponseWriter, r *http.Request) { + // Parse the form and get the file + r.ParseMultipartForm(10 << 20) // 10 MB + file, _, err := r.FormFile("file") + if err != nil { + http.Error(w, "No file provided", http.StatusBadRequest) + return + } + defer file.Close() + + // Read the file + all, err := io.ReadAll(file) + if err != nil { + http.Error(w, "Failed to read file", http.StatusInternalServerError) + return + } + // Parse the OPML file + parseResult := []models.ParseResult{} + links, err := opml.ParseOpml(string(all)) + if err != nil { + parseResult = append(parseResult, models.ParseResult{FeedURL: "/upload_opml", Msg: err.Error(), IsError: true}) + } else { + // Add the feeds to the database + for _, feed_url := range links.XMLLinks { + log.Println("Adding feed:", feed_url) + + // Validate the URL + err := validate.ValidateFeedURL(feed_url) + if err != nil { + parseResult = append(parseResult, models.ParseResult{FeedURL: feed_url, Msg: err.Error(), IsError: true}) + continue + } + + parseResult = append(parseResult, models.ParseResult{FeedURL: feed_url, Msg: "Added", IsError: false}) + } + } + + // Return the results + htmlData := html.HTMLData{ + Title: "FeedVault", + Description: "FeedVault - A feed archive", 
+ Keywords: "RSS, Atom, Feed, Archive", + Author: "TheLovinator", + CanonicalURL: "http://localhost:8000/", + Content: "

Feeds added.

", + ParseResult: parseResult, + } + html := html.FullHTML(htmlData) + w.Write([]byte(html)) +} diff --git a/html.go b/pkg/html/html.go similarity index 84% rename from html.go rename to pkg/html/html.go index b8abadc..7177902 100644 --- a/html.go +++ b/pkg/html/html.go @@ -1,13 +1,13 @@ -package main +package html import ( "fmt" "math/rand" "strings" - "github.com/tdewolff/minify/v2" - "github.com/tdewolff/minify/v2/css" - "github.com/tdewolff/minify/v2/html" + "github.com/TheLovinator1/FeedVault/pkg/models" + "github.com/TheLovinator1/FeedVault/pkg/quotes" + "github.com/TheLovinator1/FeedVault/pkg/stats" ) type HTMLData struct { @@ -17,27 +17,7 @@ type HTMLData struct { Author string CanonicalURL string Content string - ParseResult []ParseResult -} - -func minifyHTML(h string) string { - m := minify.New() - m.AddFunc("text/html", html.Minify) - minified, err := m.String("text/html", h) - if err != nil { - return h - } - return minified -} - -func minifyCSS(h string) string { - m := minify.New() - m.AddFunc("text/css", css.Minify) - minified, err := m.String("text/css", h) - if err != nil { - return h - } - return minified + ParseResult []models.ParseResult } var style = ` @@ -99,12 +79,12 @@ textarea { } ` -func fullHTML(h HTMLData) string { +func FullHTML(h HTMLData) string { var sb strings.Builder var errorBuilder strings.Builder FeedCount := 0 - DatabaseSize := GetDBSize() + DatabaseSize := stats.GetDBSize() // This is the error message that will be displayed if there are any errors if len(h.ParseResult) > 0 { @@ -151,7 +131,7 @@ func fullHTML(h HTMLData) string { sb.WriteString(` ` + h.Title + ` - + ` + StatusMsg + ` @@ -198,7 +178,7 @@ func fullHTML(h HTMLData) string { hello@feedvault.se
- ` + funMsg[rand.Intn(len(funMsg))] + ` + ` + quotes.FunMsg[rand.Intn(len(quotes.FunMsg))] + `
package models

import (
	"net/http"

	"github.com/TheLovinator1/FeedVault/pkg/stats"
)

// TemplateData holds everything a rendered page needs: page metadata
// (title, description, keywords, author, canonical URL), site
// statistics, the incoming request, and any feed-parse errors to show.
type TemplateData struct {
	Title        string
	Description  string
	Keywords     string
	Author       string
	CanonicalURL string
	FeedCount    int
	DatabaseSize string
	Request      *http.Request
	ParseErrors  []ParseResult
}

// ParseResult records the outcome of processing a single feed URL:
// the URL itself, a human-readable status message, and whether the
// message describes a failure.
type ParseResult struct {
	FeedURL string
	Msg     string
	IsError bool
}

// GetDatabaseSizeAndFeedCount fills in the statistics fields of d.
// Currently only DatabaseSize is populated, via stats.GetDBSize.
func (d *TemplateData) GetDatabaseSizeAndFeedCount() {
	// TODO: Get the feed count from the database
	// TODO: Add amount of entries
	// TODO: Add amount of users
	d.DatabaseSize = stats.GetDBSize()
}
displayed in the footer -var funMsg = []string{ +var FunMsg = []string{ "Web scraping is not a crime.", "Made in Sweden.", "🙃", @@ -16,14 +11,12 @@ var funMsg = []string{ "A feed in the hand is worth two in the bush.", "Death begets death begets death.", "I am Eo's dream.", - "Through the thorns to the stars.", "Freedom in an unjust system is no freedom at all.", "Omnis vir lupus.", "Shit escalates.", "Break the chains, my love.", "Sharpened by hate. Strengthened by love.", "Hic sunt leones.", - "The Reaper has come. And he's brought hell with him.", "Keyboard not found. Press F1 to continue.", "The stars shine brighter when shared among comrades.", "Zzz... 🛌", @@ -31,26 +24,12 @@ var funMsg = []string{ "Open source, open heart.", "RSS is the new black.", "Unsubscribe.", - "Copyright © 2004-2021 Microsoft Corporation.", "ChatGPT made 99% of this website :-)", - fmt.Sprintf("%d is the year of the Linux desktop.", 2024+rand.Intn(100)), - ":-)", - ":^)", - "( ͡° ͜ʖ ͡°)", - "pls seed", - "My life for Aiur!", - "For the swarm!", - "Do not the cat.", - "hal[", - "Meow?", - "Rawr!", "👁️👄👁️", "From each planet, to each star—equality in the cosmos.", "In the vastness of space, no one should own more than they can share.", - "http://", "Workers of the universe, unite! 
The stars are our common heritage.", "Space is for all, not just the privileged few.", - "No more celestial landlords!", "From the red planet to the black hole, solidarity knows no borders.", "Astronauts of the world, unite for a cosmic revolution!", "Space is for everyone, not just the 1%.", @@ -59,7 +38,6 @@ var funMsg = []string{ "The red glow of the stars reflects the spirit of collective effort.", "The final frontier is a shared frontier, where no one is left behind.", "Vote for a space utopia!", - "Space is for the many, not the few.", "From the Milky Way to Andromeda, the stars belong to the people.", "Space is for the workers, not the bosses.", "Let the fruits of progress be the common heritage of all.", @@ -67,24 +45,13 @@ var funMsg = []string{ "The stars do not discriminate; neither should we.", "In the vacuum of space, let equality fill the void.", "From Big Bang to the heat death of the universe, solidarity is eternal.", - "Your body betrays your degeneracy.", - "You need to get your shit together.", - "I can't help you. I am totally useless.", "In dark times, should the stars also go out?", "One day I will return to your side.", "Un Jour Je Serai de Retour Prés de Toi", - "These communists aren't men, they're *mole people*!", - "You should build Communism — precisely *because* it's impossible.", + "You should build Space Communism — precisely *because* it's impossible.", "She thinks you are an idiot, sire.", "The song of death is sweet and endless.", "Child-murdering billionaires still rule the world with a shit-eating grin.", - "Instead of building Communism, he now builds grotesque sites.", "Eight billion people - and you failed every single one of them.", "You are the first crack. 
From you it will spread.", - "Playing Sad F.M.", - "Do you remember the scent of your childhood?", - "You are a man with a lot of past, little present and almost no future.", - "European utopia or death", - "What do the know about Sweden, who only Sweden know?", - "Imagine a life in which all your basic needs were met.", } diff --git a/stats.go b/pkg/stats/stats.go similarity index 98% rename from stats.go rename to pkg/stats/stats.go index 4387d47..98fbc8f 100644 --- a/stats.go +++ b/pkg/stats/stats.go @@ -1,4 +1,4 @@ -package main +package stats import ( "fmt" diff --git a/validate.go b/pkg/validate/validate.go similarity index 97% rename from validate.go rename to pkg/validate/validate.go index 722709b..0a25d6d 100644 --- a/validate.go +++ b/pkg/validate/validate.go @@ -1,4 +1,4 @@ -package main +package validate import ( "errors" @@ -9,7 +9,7 @@ import ( ) // Run some simple validation on the URL -func validateURL(feed_url string) error { +func ValidateFeedURL(feed_url string) error { // Check if URL starts with http or https if !strings.HasPrefix(feed_url, "http://") && !strings.HasPrefix(feed_url, "https://") { return errors.New("URL must start with http:// or https://") diff --git a/sql/queries/.gitkeep b/sql/queries/.gitkeep deleted file mode 100644 index e69de29..0000000 diff --git a/sql/schema/.gitkeep b/sql/schema/.gitkeep deleted file mode 100644 index e69de29..0000000 diff --git a/sqlc.yaml b/sqlc.yaml deleted file mode 100644 index a1ac30d..0000000 --- a/sqlc.yaml +++ /dev/null @@ -1,9 +0,0 @@ -version: "2" -sql: - - engine: "postgresql" - queries: "sql/queries" - schema: "sql/schema" - gen: - go: - sql_package: "pgx/v5" - out: "internal/database" diff --git a/stats_test.go b/stats_test.go deleted file mode 100644 index 59ab2dc..0000000 --- a/stats_test.go +++ /dev/null @@ -1,28 +0,0 @@ -package main - -import ( - "testing" - "time" -) - -// If the cache is less than 10 minutes old, return the cached data. 
-func TestCacheLessThan10MinutesOld(t *testing.T) { - result := GetDBSize() - - // Assert that the size of the database is returned - if result != cache.data { - t.Errorf("Expected database size, but got %s", result) - } -} - -// If the cache is more than 10 minutes old, return the size of the database. -func TestCacheMoreThan10MinutesOld(t *testing.T) { - // Set the cache timestamp to 11 minutes ago - cache.timestamp = time.Now().Add(-11 * time.Minute) - result := GetDBSize() - - // Assert that the size of the database is returned - if result != cache.data { - t.Errorf("Expected database size, but got %s", result) - } -} diff --git a/tests/html_test.go b/tests/html_test.go new file mode 100644 index 0000000..5adb21c --- /dev/null +++ b/tests/html_test.go @@ -0,0 +1,30 @@ +package main + +import ( + "strings" + "testing" + + "github.com/TheLovinator1/FeedVault/pkg/html" + "github.com/TheLovinator1/FeedVault/pkg/models" +) + +// Displays error messages if there are any parse errors +func TestErrorMessages(t *testing.T) { + // Initialize test data + parseResult := []models.ParseResult{ + {IsError: true, Msg: "Error 1"}, + {IsError: true, Msg: "Error 2"}, + } + + h := html.HTMLData{ + ParseResult: parseResult, + } + + // Invoke function under test + result := html.FullHTML(h) + + // Assert that the result contains the error messages + if !strings.Contains(result, "Error 1") || !strings.Contains(result, "Error 2") { + t.Errorf("Expected error messages, but got: %s", result) + } +} diff --git a/opml_test.go b/tests/opml_test.go similarity index 89% rename from opml_test.go rename to tests/opml_test.go index 180554a..d8c6e00 100644 --- a/opml_test.go +++ b/tests/opml_test.go @@ -1,6 +1,10 @@ package main -import "testing" +import ( + "testing" + + "github.com/TheLovinator1/FeedVault/pkg/opml" +) var opmlExample = ` @@ -30,7 +34,7 @@ var secondOpmlExample = ` // Test the opml parser func TestParseOpml(t *testing.T) { - links, err := ParseOpml(opmlExample) + links, err := 
opml.ParseOpml(opmlExample) if err != nil { t.Error(err) } @@ -42,8 +46,8 @@ func TestParseOpml(t *testing.T) { } // Test that the links are unique - links.XMLLinks = removeDuplicates(links.XMLLinks) - links.HTMLLinks = removeDuplicates(links.HTMLLinks) + links.XMLLinks = opml.RemoveDuplicates(links.XMLLinks) + links.HTMLLinks = opml.RemoveDuplicates(links.HTMLLinks) if len(links.XMLLinks) != 2 { t.Errorf("Expected 2 links, got %d", len(links.XMLLinks)) } @@ -69,7 +73,7 @@ func TestParseOpml(t *testing.T) { // Test the opml parser with nested outlines func TestParseOpmlNested(t *testing.T) { - links, err := ParseOpml(secondOpmlExample) + links, err := opml.ParseOpml(secondOpmlExample) if err != nil { t.Error(err) } @@ -81,8 +85,8 @@ func TestParseOpmlNested(t *testing.T) { } // Test that the links are unique - links.XMLLinks = removeDuplicates(links.XMLLinks) - links.HTMLLinks = removeDuplicates(links.HTMLLinks) + links.XMLLinks = opml.RemoveDuplicates(links.XMLLinks) + links.HTMLLinks = opml.RemoveDuplicates(links.HTMLLinks) if len(links.XMLLinks) != 2 { t.Errorf("Expected 2 links, got %d", len(links.XMLLinks)) } diff --git a/tests/validate_test.go b/tests/validate_test.go new file mode 100644 index 0000000..5c78d9f --- /dev/null +++ b/tests/validate_test.go @@ -0,0 +1,248 @@ +package main + +import ( + "testing" + + "github.com/TheLovinator1/FeedVault/pkg/validate" +) + +// URL starts with http:// +func TestURLStartsWithHTTP(t *testing.T) { + url := "http://example.com" + err := validate.ValidateFeedURL(url) + if err != nil { + t.Errorf("Expected no error, got %v", err) + } +} + +// URL starts with https:// +func TestURLStartsWithHTTPS(t *testing.T) { + url := "https://example.com" + err := validate.ValidateFeedURL(url) + if err != nil { + t.Errorf("Expected no error, got %v", err) + } +} + +// URL contains a valid domain +func TestURLContainsValidDomain(t *testing.T) { + url := "http://example.com" + err := validate.ValidateFeedURL(url) + if err != nil { + 
t.Errorf("Expected no error, got %v", err) + } +} + +// URL is empty +func TestURLEmpty(t *testing.T) { + url := "" + err := validate.ValidateFeedURL(url) + if err == nil { + t.Error("Expected an error, got nil") + } else if err.Error() != "URL must start with http:// or https://" { + t.Errorf("Expected error message 'URL must start with http:// or https://', got '%v'", err.Error()) + } +} + +// URL does not contain a domain +func TestURLNotNumbers(t *testing.T) { + url := "12345" + err := validate.ValidateFeedURL(url) + if err == nil { + t.Error("Expected an error, got nil") + } else if err.Error() != "URL must start with http:// or https://" { + t.Errorf("Expected error message 'URL must start with http:// or https://', got '%v'", err.Error()) + } +} + +// URL is not a valid URL +func TestURLNotValidURL(t *testing.T) { + url := "example.com" + err := validate.ValidateFeedURL(url) + if err == nil { + t.Error("Expected an error, got nil") + } else if err.Error() != "URL must start with http:// or https://" { + t.Errorf("Expected error message 'URL must start with http:// or https://', got '%v'", err.Error()) + } +} + +// Domain is resolvable +func TestDomainIsResolvable(t *testing.T) { + url := "http://example.com" + err := validate.ValidateFeedURL(url) + if err != nil { + t.Errorf("Expected no error, got %v", err) + } +} + +// Domain does not end with .local +func TestDomainDoesNotEndWithLocal(t *testing.T) { + url := "http://example.com" + err := validate.ValidateFeedURL(url) + if err != nil { + t.Errorf("Expected no error, got %v", err) + } +} + +// Domain is not localhost +func TestDomainIsNotLocalhost(t *testing.T) { + url := "http://example.com" + err := validate.ValidateFeedURL(url) + if err != nil { + t.Errorf("Expected no error, got %v", err) + } +} + +// Domain is not an IP address +func TestDomainIsNotIPAddress(t *testing.T) { + url := "http://example.com" + err := validate.ValidateFeedURL(url) + if err != nil { + t.Errorf("Expected no error, got %v", 
err) + } +} + +// URL is a file path +func TestURLIsFilePath(t *testing.T) { + url := "/path/to/file" + err := validate.ValidateFeedURL(url) + if err == nil { + t.Error("Expected an error, got nil") + } else if err.Error() != "URL must start with http:// or https://" { + t.Errorf("Expected error message 'URL must start with http:// or https://', got '%v'", err.Error()) + } +} + +// URL is a relative path +func TestURLIsRelativePath(t *testing.T) { + url := "/path/to/resource" + err := validate.ValidateFeedURL(url) + if err == nil { + t.Error("Expected an error, got nil") + } else if err.Error() != "URL must start with http:// or https://" { + t.Errorf("Expected error message 'URL must start with http:// or https://', got '%v'", err.Error()) + } +} + +// URL is a non-existent domain +func TestNonExistentDomainURL(t *testing.T) { + url := "http://jfsalksajlkfsajklfsajklfllfjffffkfsklslsksassflfskjlfjlfsjkalfsaf.com" + err := validate.ValidateFeedURL(url) + if err == nil { + t.Error("Expected an error, got nil") + } else if err.Error() != "failed to resolve domain" { + t.Errorf("Expected error message 'failed to resolve domain', got '%v'", err.Error()) + } +} + +// URL is a malformed URL +func TestMalformedURL(t *testing.T) { + url := "malformedurl" + err := validate.ValidateFeedURL(url) + if err == nil { + t.Error("Expected an error, got nil") + } else if err.Error() != "URL must start with http:// or https://" { + t.Errorf("Expected error message 'URL must start with http:// or https://', got '%v'", err.Error()) + } +} + +// URL is a domain that does not support HTTP/HTTPS +func TestURLDomainNotSupportHTTP(t *testing.T) { + url := "ftp://example.com" + err := validate.ValidateFeedURL(url) + if err == nil { + t.Error("Expected an error, got nil") + } else if err.Error() != "URL must start with http:// or https://" { + t.Errorf("Expected error message 'URL must start with http:// or https://', got '%v'", err.Error()) + } +} + +// URL is an unreachable domain +func 
TestUnreachableDomain(t *testing.T) { + url := "http://fafsffsfsfsfsafsasafassfs.com" + err := validate.ValidateFeedURL(url) + if err == nil { + t.Error("Expected an error, got nil") + } else if err.Error() != "failed to resolve domain" { + t.Errorf("Expected error message 'failed to resolve domain', got '%v'", err.Error()) + } +} + +// URL is an IP address +func TestURLIsIPAddress(t *testing.T) { + url := "http://84.55.107.42" + err := validate.ValidateFeedURL(url) + if err == nil { + t.Error("Expected an error, got nil") + } else if err.Error() != "IP address URLs are not allowed" { + t.Errorf("Expected error message 'IP address URLs are not allowed', got '%v'", err.Error()) + } +} + +// URL ends with .local +func TestURLEndsWithLocal(t *testing.T) { + url := "http://example.local" + err := validate.ValidateFeedURL(url) + if err == nil { + t.Error("Expected an error, got nil") + } else if err.Error() != "URLs ending with .local are not allowed" { + t.Errorf("Expected error message 'URLs ending with .local are not allowed', got '%v'", err.Error()) + } +} + +func TestLocalURLs(t *testing.T) { + localURLs := []string{ + "https://localhost", + "https://home.arpa", + "https://airbox.home", + "https://airport", + "https://arcor.easybox", + "https://aterm.me", + "https://bthub.home", + "https://bthomehub.home", + "https://congstar.box", + "https://connect.box", + "https://console.gl-inet.com", + "https://easy.box", + "https://etxr", + "https://fire.walla", + "https://fritz.box", + "https://fritz.nas", + "https://fritz.repeater", + "https://giga.cube", + "https://hi.link", + "https://hitronhub.home", + "https://homerouter.cpe", + "https://huaweimobilewifi.com", + "https://localbattle.net", + "https://myfritz.box", + "https://mobile.hotspot", + "https://ntt.setup", + "https://pi.hole", + "https://plex.direct", + "https://repeater.asus.com", + "https://router.asus.com", + "https://routerlogin.com", + "https://routerlogin.net", + "https://samsung.router", + 
"https://speedport.ip", + "https://steamloopback.host", + "https://tplinkap.net", + "https://tplinkeap.net", + "https://tplinkmodem.net", + "https://tplinkplclogin.net", + "https://tplinkrepeater.net", + "https://tplinkwifi.net", + "https://web.setup", + "https://web.setup.home", + } + + for _, localURL := range localURLs { + err := validate.ValidateFeedURL(localURL) + if err == nil { + t.Errorf("Expected an error for local URL %s, got nil", localURL) + } else if err.Error() != "local URLs are not allowed" { + t.Errorf("Expected error message 'local URLs are not allowed', got '%v'", err.Error()) + } + } +} diff --git a/views_test.go b/tests/views_test.go similarity index 88% rename from views_test.go rename to tests/views_test.go index f5b0e49..5a8b0a2 100644 --- a/views_test.go +++ b/tests/views_test.go @@ -3,9 +3,10 @@ package main import ( "net/http" "net/http/httptest" + "strings" "testing" - "github.com/stretchr/testify/assert" + "github.com/TheLovinator1/FeedVault/pkg/handlers" ) func TestIndexHandler(t *testing.T) { @@ -17,7 +18,7 @@ func TestIndexHandler(t *testing.T) { // We create a ResponseRecorder (which satisfies http.ResponseWriter) to record the response. rr := httptest.NewRecorder() - handler := http.HandlerFunc(IndexHandler) + handler := http.HandlerFunc(handlers.IndexHandler) // Our handlers satisfy http.Handler, so we can call their ServeHTTP method // directly and pass in our Request and ResponseRecorder. @@ -32,7 +33,7 @@ func TestIndexHandler(t *testing.T) { // Check the response contains the expected string. shouldContain := "Input the URLs of the feeds you wish to archive below. You can add as many as needed, and access them through the website or API. Alternatively, include links to .opml files, and the feeds within will be archived." 
body := rr.Body.String() - if !assert.Contains(t, body, shouldContain) { + if !strings.Contains(body, shouldContain) { t.Errorf("handler returned unexpected body: got %v want %v", body, shouldContain) } @@ -47,7 +48,7 @@ func TestApiHandler(t *testing.T) { // We create a ResponseRecorder (which satisfies http.ResponseWriter) to record the response. rr := httptest.NewRecorder() - handler := http.HandlerFunc(ApiHandler) + handler := http.HandlerFunc(handlers.ApiHandler) // Our handlers satisfy http.Handler, so we can call their ServeHTTP method // directly and pass in our Request and ResponseRecorder. @@ -62,7 +63,7 @@ func TestApiHandler(t *testing.T) { // Check the response contains the expected string. shouldContain := "Here be dragons." body := rr.Body.String() - if !assert.Contains(t, body, shouldContain) { + if !strings.Contains(body, shouldContain) { t.Errorf("handler returned unexpected body: got %v want %v", body, shouldContain) } diff --git a/validate_test.go b/validate_test.go deleted file mode 100644 index 62e421a..0000000 --- a/validate_test.go +++ /dev/null @@ -1,200 +0,0 @@ -package main - -import ( - "testing" - - "github.com/stretchr/testify/assert" -) - -// URL starts with http:// -func TestURLStartsWithHTTP(t *testing.T) { - url := "http://example.com" - err := validateURL(url) - assert.Nil(t, err) -} - -// URL starts with https:// -func TestURLStartsWithHTTPS(t *testing.T) { - url := "https://example.com" - err := validateURL(url) - assert.Nil(t, err) -} - -// URL contains a valid domain -func TestURLContainsValidDomain(t *testing.T) { - url := "http://example.com" - err := validateURL(url) - assert.Nil(t, err) -} - -// URL is empty -func TestURLEmpty(t *testing.T) { - url := "" - err := validateURL(url) - assert.NotNil(t, err) - assert.Equal(t, "URL must start with http:// or https://", err.Error()) -} - -// URL does not contain a domain -func TestURLNotNumbers(t *testing.T) { - url := "12345" - err := validateURL(url) - assert.NotNil(t, err) - 
assert.Equal(t, "URL must start with http:// or https://", err.Error()) -} - -// URL is not a valid URL -func TestURLNotValidURL(t *testing.T) { - url := "example.com" - err := validateURL(url) - assert.NotNil(t, err) - assert.Equal(t, "URL must start with http:// or https://", err.Error()) -} - -// Domain is resolvable -func TestDomainIsResolvable(t *testing.T) { - url := "http://example.com" - err := validateURL(url) - assert.Nil(t, err) -} - -// Domain does not end with .local -func TestDomainDoesNotEndWithLocal(t *testing.T) { - url := "http://example.com" - err := validateURL(url) - assert.Nil(t, err) -} - -// Domain is not localhost -func TestDomainIsNotLocalhost(t *testing.T) { - url := "http://example.com" - err := validateURL(url) - assert.Nil(t, err) -} - -// Domain is not an IP address -func TestDomainIsNotIPAddress(t *testing.T) { - url := "http://example.com" - err := validateURL(url) - assert.Nil(t, err) -} - -// URL is a file path -func TestURLIsFilePath(t *testing.T) { - url := "/path/to/file" - err := validateURL(url) - assert.NotNil(t, err) - assert.Equal(t, "URL must start with http:// or https://", err.Error()) -} - -// URL is a relative path -func TestURLIsRelativePath(t *testing.T) { - url := "/path/to/resource" - err := validateURL(url) - assert.NotNil(t, err) - assert.Equal(t, "URL must start with http:// or https://", err.Error()) -} - -// URL is a non-existent domain -func TestNonExistentDomainURL(t *testing.T) { - url := "http://jfsalksajlkfsajklfsajklfllfjffffkfsklslsksassflfskjlfjlfsjkalfsaf.com" - err := validateURL(url) - assert.NotNil(t, err) - assert.Equal(t, "failed to resolve domain", err.Error()) -} - -// URL is a malformed URL -func TestMalformedURL(t *testing.T) { - url := "malformedurl" - err := validateURL(url) - assert.NotNil(t, err) - assert.Equal(t, "URL must start with http:// or https://", err.Error()) -} - -// URL is a domain that does not support HTTP/HTTPS -func TestURLDomainNotSupportHTTP(t *testing.T) { - url := 
"ftp://example.com" - err := validateURL(url) - assert.NotNil(t, err) - assert.Equal(t, "URL must start with http:// or https://", err.Error()) -} - -// URL is an unreachable domain -func TestUnreachableDomain(t *testing.T) { - url := "http://fafsffsfsfsfsafsasafassfs.com" - err := validateURL(url) - assert.NotNil(t, err) - assert.Equal(t, "failed to resolve domain", err.Error()) -} - -// URL is an IP address -func TestURLIsIPAddress(t *testing.T) { - url := "http://84.55.107.42" - err := validateURL(url) - assert.NotNil(t, err) - assert.Equal(t, "IP address URLs are not allowed", err.Error()) -} - -// URL ends with .local -func TestURLEndsWithLocal(t *testing.T) { - url := "http://example.local" - err := validateURL(url) - assert.NotNil(t, err) - assert.Equal(t, "URLs ending with .local are not allowed", err.Error()) -} - -func TestLocalURLs(t *testing.T) { - localURLs := []string{ - "https://localhost", - "https://home.arpa", - "https://airbox.home", - "https://airport", - "https://arcor.easybox", - "https://aterm.me", - "https://bthub.home", - "https://bthomehub.home", - "https://congstar.box", - "https://connect.box", - "https://console.gl-inet.com", - "https://easy.box", - "https://etxr", - "https://fire.walla", - "https://fritz.box", - "https://fritz.nas", - "https://fritz.repeater", - "https://giga.cube", - "https://hi.link", - "https://hitronhub.home", - "https://homerouter.cpe", - "https://huaweimobilewifi.com", - "https://localbattle.net", - "https://myfritz.box", - "https://mobile.hotspot", - "https://ntt.setup", - "https://pi.hole", - "https://plex.direct", - "https://repeater.asus.com", - "https://router.asus.com", - "https://routerlogin.com", - "https://routerlogin.net", - "https://samsung.router", - "https://speedport.ip", - "https://steamloopback.host", - "https://tplinkap.net", - "https://tplinkeap.net", - "https://tplinkmodem.net", - "https://tplinkplclogin.net", - "https://tplinkrepeater.net", - "https://tplinkwifi.net", - "https://web.setup", - 
"https://web.setup.home", - } - - for _, localURL := range localURLs { - err := validateURL(localURL) - if err == nil { - t.Errorf("Expected an error for local URL %s, got nil", localURL) - } - assert.Equal(t, "local URLs are not allowed", err.Error()) - } -} diff --git a/views.go b/views.go deleted file mode 100644 index 4b1b88a..0000000 --- a/views.go +++ /dev/null @@ -1,199 +0,0 @@ -package main - -import ( - "io" - "log" - "net/http" - "strings" -) - -func IndexHandler(w http.ResponseWriter, _ *http.Request) { - - content := `

Feeds to archive

-

- Input the URLs of the feeds you wish to archive below. You can add as many as needed, and access them through the website or API. Alternatively, include links to .opml files, and the feeds within will be archived. -

-
- - -
-
-

You can also upload .opml files containing the feeds you wish to archive:

-
- - -
- ` - - FAQ := ` - -

FAQ

-
- What are web feeds? -

- Web feeds are a way to distribute content on the web. They allow users to access updates from websites without having to visit them directly. Feeds are typically used for news websites, blogs, and other sites that frequently update content. -
- You can read more about web feeds on Wikipedia. -

-
-
-
- What is FeedVault? -

- FeedVault is a service that archives web feeds. It allows users to access and search for historical content from various websites. The service is designed to preserve the history of the web and provide a reliable source for accessing content that may no longer be available on the original websites. -

-
-
-
- Why archive feeds? -

- Web feeds are a valuable source of information, and archiving them ensures that the content is preserved for future reference. By archiving feeds, we can ensure that historical content is available for research, analysis, and other purposes. Additionally, archiving feeds can help prevent the loss of valuable information due to website changes, outages, or other issues. -

-
-
-
- How does it work? -

- FeedVault is written in Go and uses the gofeed library to parse feeds. The service periodically checks for new content in the feeds and stores it in a database. Users can access the archived feeds through the website or API. -


-
-
- How can I access the archived feeds? -

- You can access the archived feeds through the website or API. The website provides a user interface for searching and browsing the feeds, while the API allows you to access the feeds programmatically. You can also download the feeds in various formats, such as JSON, XML, or RSS. -

-
- ` - - content += FAQ - - htmlData := HTMLData{ - Title: "FeedVault", - Description: "FeedVault - A feed archive", - Keywords: "RSS, Atom, Feed, Archive", - Author: "TheLovinator", - CanonicalURL: "http://localhost:8000/", - Content: content, - } - html := fullHTML(htmlData) - w.Write([]byte(html)) -} - -func ApiHandler(w http.ResponseWriter, _ *http.Request) { - htmlData := HTMLData{ - Title: "FeedVault API", - Description: "FeedVault API - A feed archive", - Keywords: "RSS, Atom, Feed, Archive, API", - Author: "TheLovinator", - CanonicalURL: "http://localhost:8000/api", - Content: "

Here be dragons.

", - } - html := fullHTML(htmlData) - w.Write([]byte(html)) -} -func FeedsHandler(w http.ResponseWriter, _ *http.Request) { - htmlData := HTMLData{ - Title: "FeedVault Feeds", - Description: "FeedVault Feeds - A feed archive", - Keywords: "RSS, Atom, Feed, Archive", - Author: "TheLovinator", - CanonicalURL: "http://localhost:8000/feeds", - Content: "

Here be feeds.

", - } - html := fullHTML(htmlData) - w.Write([]byte(html)) -} - -func AddFeedHandler(w http.ResponseWriter, r *http.Request) { - var parseErrors []ParseResult - - // Parse the form and get the URLs - r.ParseForm() - urls := r.Form.Get("urls") - if urls == "" { - http.Error(w, "No URLs provided", http.StatusBadRequest) - return - } - - for _, feed_url := range strings.Split(urls, "\n") { - // TODO: Try to upgrade to https if http is provided - - // Validate the URL - err := validateURL(feed_url) - if err != nil { - parseErrors = append(parseErrors, ParseResult{FeedURL: feed_url, Msg: err.Error(), IsError: true}) - continue - } - - // "Add" the feed to the database - log.Println("Adding feed:", feed_url) - parseErrors = append(parseErrors, ParseResult{FeedURL: feed_url, Msg: "Added", IsError: false}) - - } - htmlData := HTMLData{ - Title: "FeedVault", - Description: "FeedVault - A feed archive", - Keywords: "RSS, Atom, Feed, Archive", - Author: "TheLovinator", - CanonicalURL: "http://localhost:8000/", - Content: "

Feeds added.

", - ParseResult: parseErrors, - } - - html := fullHTML(htmlData) - w.Write([]byte(html)) -} - -func UploadOpmlHandler(w http.ResponseWriter, r *http.Request) { - // Parse the form and get the file - r.ParseMultipartForm(10 << 20) // 10 MB - file, _, err := r.FormFile("file") - if err != nil { - http.Error(w, "No file provided", http.StatusBadRequest) - return - } - defer file.Close() - - // Read the file - all, err := io.ReadAll(file) - if err != nil { - http.Error(w, "Failed to read file", http.StatusInternalServerError) - return - } - // Convert the file to a string - opml := string(all) - - // Parse the OPML file - parseResult := []ParseResult{} - links, err := ParseOpml(opml) - if err != nil { - parseResult = append(parseResult, ParseResult{FeedURL: "/upload_opml", Msg: err.Error(), IsError: true}) - } else { - // Add the feeds to the database - for _, feed_url := range links.XMLLinks { - log.Println("Adding feed:", feed_url) - - // Validate the URL - err := validateURL(feed_url) - if err != nil { - parseResult = append(parseResult, ParseResult{FeedURL: feed_url, Msg: err.Error(), IsError: true}) - continue - } - - parseResult = append(parseResult, ParseResult{FeedURL: feed_url, Msg: "Added", IsError: false}) - } - } - - // Return the results - htmlData := HTMLData{ - Title: "FeedVault", - Description: "FeedVault - A feed archive", - Keywords: "RSS, Atom, Feed, Archive", - Author: "TheLovinator", - CanonicalURL: "http://localhost:8000/", - Content: "

Feeds added.

", - ParseResult: parseResult, - } - html := fullHTML(htmlData) - w.Write([]byte(html)) -}