Move everything back to root dir

This commit is contained in:
Joakim Hellsén 2024-02-14 04:40:48 +01:00
commit 1fa8351d11
19 changed files with 257 additions and 300 deletions

View file

@ -5,7 +5,7 @@ tmp_dir = "tmp"
[build] [build]
args_bin = [] args_bin = []
bin = "tmp\\main.exe" bin = "tmp\\main.exe"
cmd = "go build -o ./tmp/main.exe ./cmd/feedvault/" cmd = "go build -o ./tmp/main.exe ."
delay = 1000 delay = 1000
exclude_dir = [ exclude_dir = [
"assets", "assets",
@ -15,7 +15,6 @@ exclude_dir = [
"testdata", "testdata",
"tests", "tests",
".git", ".git",
".idea",
".vscode", ".vscode",
] ]
exclude_file = [] exclude_file = []

View file

@ -33,4 +33,4 @@ jobs:
run: go mod tidy run: go mod tidy
- name: Build - name: Build
run: go build -v -ldflags="-s -w" .\cmd\feedvault\ run: go build -v -ldflags="-s -w" .

View file

@ -1,4 +1,4 @@
package feeds package main
import ( import (
"context" "context"

208
handlers.go Normal file
View file

@ -0,0 +1,208 @@
package main
import (
"io"
"log"
"net/http"
"strings"
)
func ApiHandler(w http.ResponseWriter, _ *http.Request) {
htmlData := HTMLData{
Title: "FeedVault API",
Description: "FeedVault API - A feed archive",
Keywords: "RSS, Atom, Feed, Archive, API",
Author: "TheLovinator",
CanonicalURL: "http://localhost:8000/api",
Content: "<p>Here be dragons.</p>",
}
html := FullHTML(htmlData)
w.Write([]byte(html))
}
// FeedsHandler renders the feed-listing page.
//
// The request is ignored; the page content is static placeholder text.
func FeedsHandler(w http.ResponseWriter, _ *http.Request) {
	htmlData := HTMLData{
		Title:        "FeedVault Feeds",
		Description:  "FeedVault Feeds - A feed archive",
		Keywords:     "RSS, Atom, Feed, Archive",
		Author:       "TheLovinator",
		CanonicalURL: "http://localhost:8000/feeds",
		// The placeholder was truncated to "Here be " when the file was moved
		// to the root directory; restore the original wording from the
		// deleted pkg/handlers version.
		Content: "<p>Here be feeds.</p>",
	}
	html := FullHTML(htmlData)
	w.Write([]byte(html))
}
// AddFeedHandler handles POSTs from the "add feeds" form. It reads the
// newline-separated URLs from the "urls" field, validates each one, stores
// the valid feeds, and renders a result page with one entry per URL.
func AddFeedHandler(w http.ResponseWriter, r *http.Request) {
	var parseErrors []ParseResult

	// Parse the form and get the URLs.
	if err := r.ParseForm(); err != nil {
		http.Error(w, "Error parsing form", http.StatusInternalServerError)
		return
	}

	urls := r.Form.Get("urls")
	if urls == "" {
		http.Error(w, "No URLs provided", http.StatusBadRequest)
		return
	}

	for _, feedURL := range strings.Split(urls, "\n") {
		// Browsers submit textarea content with CRLF line endings, so after
		// splitting on "\n" each URL (except the last) keeps a trailing "\r".
		// Trim surrounding whitespace so validation does not reject otherwise
		// valid URLs, and skip lines that were only whitespace.
		feedURL = strings.TrimSpace(feedURL)
		if feedURL == "" {
			continue
		}

		// TODO: Try to upgrade to https if http is provided

		// Validate the URL.
		if err := ValidateFeedURL(feedURL); err != nil {
			parseErrors = append(parseErrors, ParseResult{FeedURL: feedURL, Msg: err.Error(), IsError: true})
			continue
		}

		if err := AddFeedToDB(feedURL); err != nil {
			parseErrors = append(parseErrors, ParseResult{FeedURL: feedURL, Msg: err.Error(), IsError: true})
			continue
		}

		// Feed was added successfully.
		parseErrors = append(parseErrors, ParseResult{FeedURL: feedURL, Msg: "Added", IsError: false})
	}

	htmlData := HTMLData{
		Title:        "FeedVault",
		Description:  "FeedVault - A feed archive",
		Keywords:     "RSS, Atom, Feed, Archive",
		Author:       "TheLovinator",
		CanonicalURL: "http://localhost:8000/",
		Content:      "<p>Feeds added.</p>",
		ParseResult:  parseErrors,
	}
	html := FullHTML(htmlData)
	w.Write([]byte(html))
}
// IndexHandler renders the landing page: the "add feeds" form, the OPML
// upload form, and the FAQ. The request is ignored; the content is static.
func IndexHandler(w http.ResponseWriter, _ *http.Request) {
	content := `<h2>Feeds to archive</h2>
<p>
Input the URLs of the feeds you wish to archive below. You can add as many as needed, and access them through the website or API. Alternatively, include links to .opml files, and the feeds within will be archived.
</p>
<form action="/add" method="post">
<textarea id="urls" name="urls" rows="5" cols="50" required></textarea>
<button type="submit">Add feeds</button>
</form>
<br>
<p>You can also upload .opml files containing the feeds you wish to archive:</p>
<form enctype="multipart/form-data" method="post" action="/upload_opml">
<input type="file" name="file" id="file" accept=".opml" required>
<button type="submit">Upload OPML</button>
</form>
`
	// FAQ text. Two sentences below were truncated when the file was moved to
	// the root directory ("archives web It" and "to parse The service"); they
	// are restored here from the deleted pkg/handlers version, and the
	// unclosed <p> in the "How does it work?" entry is closed.
	FAQ := `
<h2>FAQ</h2>
<details>
<summary>What are web feeds?</summary>
<p>
Web feeds are a way to distribute content on the web. They allow users to access updates from websites without having to visit them directly. Feeds are typically used for news websites, blogs, and other sites that frequently update content.
<br>
You can read more about web feeds on <a href="https://en.wikipedia.org/wiki/Web_feed">Wikipedia</a>.
</p>
<hr>
</details>
<details>
<summary>What is FeedVault?</summary>
<p>
FeedVault is a service that archives web feeds. It allows users to access and search for historical content from various websites. The service is designed to preserve the history of the web and provide a reliable source for accessing content that may no longer be available on the original websites.
</p>
<hr>
</details>
<details>
<summary>Why archive feeds?</summary>
<p>
Web feeds are a valuable source of information, and archiving them ensures that the content is preserved for future reference. By archiving feeds, we can ensure that historical content is available for research, analysis, and other purposes. Additionally, archiving feeds can help prevent the loss of valuable information due to website changes, outages, or other issues.
</p>
<hr>
</details>
<details>
<summary>How does it work?</summary>
<p>
FeedVault is written in Go and uses the <a href="https://github.com/mmcdole/gofeed">gofeed</a> library to parse feeds. The service periodically checks for new content in the feeds and stores it in a database. Users can access the archived feeds through the website or API.
</p>
<hr>
</details>
<details>
<summary>How can I access the archived feeds?</summary>
<p>
You can access the archived feeds through the website or API. The website provides a user interface for searching and browsing the feeds, while the API allows you to access the feeds programmatically. You can also download the feeds in various formats, such as JSON, XML, or RSS.
</p>
</details>
`
	content += FAQ

	htmlData := HTMLData{
		Title:        "FeedVault",
		Description:  "FeedVault - A feed archive",
		Keywords:     "RSS, Atom, Feed, Archive",
		Author:       "TheLovinator",
		CanonicalURL: "http://localhost:8000/",
		Content:      content,
	}
	html := FullHTML(htmlData)
	w.Write([]byte(html))
}
// UploadOpmlHandler handles OPML file uploads. It parses the uploaded file,
// validates every XML feed link found in it, stores the valid feeds, and
// renders a result page with one entry per URL.
func UploadOpmlHandler(w http.ResponseWriter, r *http.Request) {
	// Parse the multipart form (10 MB in-memory limit). The error was
	// previously ignored; report it instead of continuing with a nil form.
	if err := r.ParseMultipartForm(10 << 20); err != nil {
		http.Error(w, "Error parsing form", http.StatusInternalServerError)
		return
	}

	file, _, err := r.FormFile("file")
	if err != nil {
		http.Error(w, "No file provided", http.StatusBadRequest)
		return
	}
	defer file.Close()

	// Read the file.
	all, err := io.ReadAll(file)
	if err != nil {
		http.Error(w, "Failed to read file", http.StatusInternalServerError)
		return
	}

	// Parse the OPML file.
	parseResult := []ParseResult{}
	links, err := ParseOpml(string(all))
	if err != nil {
		parseResult = append(parseResult, ParseResult{FeedURL: "/upload_opml", Msg: err.Error(), IsError: true})
	} else {
		// Add the feeds to the database.
		for _, feedURL := range links.XMLLinks {
			log.Println("Adding feed:", feedURL)

			// Validate the URL.
			if err := ValidateFeedURL(feedURL); err != nil {
				parseResult = append(parseResult, ParseResult{FeedURL: feedURL, Msg: err.Error(), IsError: true})
				continue
			}

			// Fix: the previous version only validated the URL and reported
			// "Added" without ever storing the feed. Persist it the same way
			// AddFeedHandler does.
			if err := AddFeedToDB(feedURL); err != nil {
				parseResult = append(parseResult, ParseResult{FeedURL: feedURL, Msg: err.Error(), IsError: true})
				continue
			}

			parseResult = append(parseResult, ParseResult{FeedURL: feedURL, Msg: "Added", IsError: false})
		}
	}

	// Return the results.
	htmlData := HTMLData{
		Title:        "FeedVault",
		Description:  "FeedVault - A feed archive",
		Keywords:     "RSS, Atom, Feed, Archive",
		Author:       "TheLovinator",
		CanonicalURL: "http://localhost:8000/",
		Content:      "<p>Feeds added.</p>",
		ParseResult:  parseResult,
	}
	html := FullHTML(htmlData)
	w.Write([]byte(html))
}

View file

@ -5,8 +5,6 @@ import (
"net/http/httptest" "net/http/httptest"
"strings" "strings"
"testing" "testing"
"github.com/TheLovinator1/FeedVault/pkg/handlers"
) )
func TestIndexHandler(t *testing.T) { func TestIndexHandler(t *testing.T) {
@ -18,7 +16,7 @@ func TestIndexHandler(t *testing.T) {
// We create a ResponseRecorder (which satisfies http.ResponseWriter) to record the response. // We create a ResponseRecorder (which satisfies http.ResponseWriter) to record the response.
rr := httptest.NewRecorder() rr := httptest.NewRecorder()
handler := http.HandlerFunc(handlers.IndexHandler) handler := http.HandlerFunc(IndexHandler)
// Our handlers satisfy http.Handler, so we can call their ServeHTTP method // Our handlers satisfy http.Handler, so we can call their ServeHTTP method
// directly and pass in our Request and ResponseRecorder. // directly and pass in our Request and ResponseRecorder.
@ -48,7 +46,7 @@ func TestApiHandler(t *testing.T) {
// We create a ResponseRecorder (which satisfies http.ResponseWriter) to record the response. // We create a ResponseRecorder (which satisfies http.ResponseWriter) to record the response.
rr := httptest.NewRecorder() rr := httptest.NewRecorder()
handler := http.HandlerFunc(handlers.ApiHandler) handler := http.HandlerFunc(ApiHandler)
// Our handlers satisfy http.Handler, so we can call their ServeHTTP method // Our handlers satisfy http.Handler, so we can call their ServeHTTP method
// directly and pass in our Request and ResponseRecorder. // directly and pass in our Request and ResponseRecorder.

View file

@ -1,14 +1,10 @@
package html package main
import ( import (
"fmt" "fmt"
"log" "log"
"math/rand" "math/rand"
"strings" "strings"
"github.com/TheLovinator1/FeedVault/pkg/models"
"github.com/TheLovinator1/FeedVault/pkg/quotes"
"github.com/TheLovinator1/FeedVault/pkg/stats"
) )
type HTMLData struct { type HTMLData struct {
@ -18,7 +14,7 @@ type HTMLData struct {
Author string Author string
CanonicalURL string CanonicalURL string
Content string Content string
ParseResult []models.ParseResult ParseResult []ParseResult
} }
var style = ` var style = `
@ -150,7 +146,7 @@ const (
</html>` </html>`
) )
func buildErrorList(parseResults []models.ParseResult) string { func buildErrorList(parseResults []ParseResult) string {
var errorBuilder strings.Builder var errorBuilder strings.Builder
if len(parseResults) > 0 { if len(parseResults) > 0 {
errorBuilder.WriteString("<ul>") errorBuilder.WriteString("<ul>")
@ -170,12 +166,12 @@ func buildErrorList(parseResults []models.ParseResult) string {
func FullHTML(h HTMLData) string { func FullHTML(h HTMLData) string {
statusMsg := buildErrorList(h.ParseResult) statusMsg := buildErrorList(h.ParseResult)
feedCount := 0 feedCount := 0
databaseSize, err := stats.GetDBSize() databaseSize, err := GetDBSize()
if err != nil { if err != nil {
databaseSize = "0 KiB" databaseSize = "0 KiB"
log.Println("Error getting database size:", err) log.Println("Error getting database size:", err)
} }
funMsg := quotes.FunMsg[rand.Intn(len(quotes.FunMsg))] funMsg := FunMsg[rand.Intn(len(FunMsg))]
return fmt.Sprintf(htmlTemplate, h.Description, h.Keywords, h.Author, h.CanonicalURL, h.Title, style, statusMsg, feedCount, databaseSize, h.Content, funMsg) return fmt.Sprintf(htmlTemplate, h.Description, h.Keywords, h.Author, h.CanonicalURL, h.Title, style, statusMsg, feedCount, databaseSize, h.Content, funMsg)
} }

View file

@ -3,25 +3,22 @@ package main
import ( import (
"strings" "strings"
"testing" "testing"
"github.com/TheLovinator1/FeedVault/pkg/html"
"github.com/TheLovinator1/FeedVault/pkg/models"
) )
// Displays error messages if there are any parse errors // Displays error messages if there are any parse errors
func TestErrorMessages(t *testing.T) { func TestErrorMessages(t *testing.T) {
// Initialize test data // Initialize test data
parseResult := []models.ParseResult{ parseResult := []ParseResult{
{IsError: true, Msg: "Error 1"}, {IsError: true, Msg: "Error 1"},
{IsError: true, Msg: "Error 2"}, {IsError: true, Msg: "Error 2"},
} }
h := html.HTMLData{ h := HTMLData{
ParseResult: parseResult, ParseResult: parseResult,
} }
// Invoke function under test // Invoke function under test
result := html.FullHTML(h) result := FullHTML(h)
// Assert that the result contains the error messages // Assert that the result contains the error messages
if !strings.Contains(result, "Error 1") || !strings.Contains(result, "Error 2") { if !strings.Contains(result, "Error 1") || !strings.Contains(result, "Error 2") {

View file

@ -4,8 +4,6 @@ import (
"log" "log"
"net/http" "net/http"
"github.com/TheLovinator1/FeedVault/pkg/handlers"
) )
func init() { log.SetFlags(log.LstdFlags | log.Lshortfile) } func init() { log.SetFlags(log.LstdFlags | log.Lshortfile) }
@ -17,11 +15,11 @@ func main() {
mux := http.NewServeMux() mux := http.NewServeMux()
// Routes // Routes
mux.HandleFunc("/", handlers.IndexHandler) mux.HandleFunc("/", IndexHandler)
mux.HandleFunc("/api", handlers.ApiHandler) mux.HandleFunc("/api", ApiHandler)
mux.HandleFunc("/feeds", handlers.FeedsHandler) mux.HandleFunc("/feeds", FeedsHandler)
mux.HandleFunc("/add", handlers.AddFeedHandler) mux.HandleFunc("/add", AddFeedHandler)
mux.HandleFunc("/upload_opml", handlers.UploadOpmlHandler) mux.HandleFunc("/upload_opml", UploadOpmlHandler)
// Create server // Create server
server := &http.Server{ server := &http.Server{

View file

@ -1,4 +1,4 @@
package models package main
type ParseResult struct { type ParseResult struct {
FeedURL string FeedURL string

View file

@ -1,4 +1,4 @@
package opml package main
import "encoding/xml" import "encoding/xml"

View file

@ -2,8 +2,6 @@ package main
import ( import (
"testing" "testing"
"github.com/TheLovinator1/FeedVault/pkg/opml"
) )
var opmlExample = `<?xml version="1.0" encoding="utf-8"?> var opmlExample = `<?xml version="1.0" encoding="utf-8"?>
@ -34,7 +32,7 @@ var secondOpmlExample = `<?xml version="1.0" encoding="UTF-8"?>
// Test the opml parser // Test the opml parser
func TestParseOpml(t *testing.T) { func TestParseOpml(t *testing.T) {
links, err := opml.ParseOpml(opmlExample) links, err := ParseOpml(opmlExample)
if err != nil { if err != nil {
t.Error(err) t.Error(err)
} }
@ -46,8 +44,8 @@ func TestParseOpml(t *testing.T) {
} }
// Test that the links are unique // Test that the links are unique
links.XMLLinks = opml.RemoveDuplicates(links.XMLLinks) links.XMLLinks = RemoveDuplicates(links.XMLLinks)
links.HTMLLinks = opml.RemoveDuplicates(links.HTMLLinks) links.HTMLLinks = RemoveDuplicates(links.HTMLLinks)
if len(links.XMLLinks) != 2 { if len(links.XMLLinks) != 2 {
t.Errorf("Expected 2 links, got %d", len(links.XMLLinks)) t.Errorf("Expected 2 links, got %d", len(links.XMLLinks))
} }
@ -73,7 +71,7 @@ func TestParseOpml(t *testing.T) {
// Test the opml parser with nested outlines // Test the opml parser with nested outlines
func TestParseOpmlNested(t *testing.T) { func TestParseOpmlNested(t *testing.T) {
links, err := opml.ParseOpml(secondOpmlExample) links, err := ParseOpml(secondOpmlExample)
if err != nil { if err != nil {
t.Error(err) t.Error(err)
} }
@ -85,8 +83,8 @@ func TestParseOpmlNested(t *testing.T) {
} }
// Test that the links are unique // Test that the links are unique
links.XMLLinks = opml.RemoveDuplicates(links.XMLLinks) links.XMLLinks = RemoveDuplicates(links.XMLLinks)
links.HTMLLinks = opml.RemoveDuplicates(links.HTMLLinks) links.HTMLLinks = RemoveDuplicates(links.HTMLLinks)
if len(links.XMLLinks) != 2 { if len(links.XMLLinks) != 2 {
t.Errorf("Expected 2 links, got %d", len(links.XMLLinks)) t.Errorf("Expected 2 links, got %d", len(links.XMLLinks))
} }

View file

@ -1,20 +0,0 @@
package handlers
import (
"net/http"
"github.com/TheLovinator1/FeedVault/pkg/html"
)
func ApiHandler(w http.ResponseWriter, _ *http.Request) {
htmlData := html.HTMLData{
Title: "FeedVault API",
Description: "FeedVault API - A feed archive",
Keywords: "RSS, Atom, Feed, Archive, API",
Author: "TheLovinator",
CanonicalURL: "http://localhost:8000/api",
Content: "<p>Here be dragons.</p>",
}
html := html.FullHTML(htmlData)
w.Write([]byte(html))
}

View file

@ -1,74 +0,0 @@
package handlers
import (
"net/http"
"strings"
"github.com/TheLovinator1/FeedVault/pkg/feeds"
"github.com/TheLovinator1/FeedVault/pkg/html"
"github.com/TheLovinator1/FeedVault/pkg/models"
"github.com/TheLovinator1/FeedVault/pkg/validate"
)
func FeedsHandler(w http.ResponseWriter, _ *http.Request) {
htmlData := html.HTMLData{
Title: "FeedVault Feeds",
Description: "FeedVault Feeds - A feed archive",
Keywords: "RSS, Atom, Feed, Archive",
Author: "TheLovinator",
CanonicalURL: "http://localhost:8000/feeds",
Content: "<p>Here be feeds.</p>",
}
html := html.FullHTML(htmlData)
w.Write([]byte(html))
}
func AddFeedHandler(w http.ResponseWriter, r *http.Request) {
var parseErrors []models.ParseResult
// Parse the form and get the URLs
err := r.ParseForm()
if err != nil {
http.Error(w, "Error parsing form", http.StatusInternalServerError)
return
}
urls := r.Form.Get("urls")
if urls == "" {
http.Error(w, "No URLs provided", http.StatusBadRequest)
return
}
for _, feed_url := range strings.Split(urls, "\n") {
// TODO: Try to upgrade to https if http is provided
// Validate the URL
err := validate.ValidateFeedURL(feed_url)
if err != nil {
parseErrors = append(parseErrors, models.ParseResult{FeedURL: feed_url, Msg: err.Error(), IsError: true})
continue
}
err = feeds.AddFeedToDB(feed_url)
if err != nil {
parseErrors = append(parseErrors, models.ParseResult{FeedURL: feed_url, Msg: err.Error(), IsError: true})
continue
}
// Feed was added successfully
parseErrors = append(parseErrors, models.ParseResult{FeedURL: feed_url, Msg: "Added", IsError: false})
}
htmlData := html.HTMLData{
Title: "FeedVault",
Description: "FeedVault - A feed archive",
Keywords: "RSS, Atom, Feed, Archive",
Author: "TheLovinator",
CanonicalURL: "http://localhost:8000/",
Content: "<p>Feeds added.</p>",
ParseResult: parseErrors,
}
html := html.FullHTML(htmlData)
w.Write([]byte(html))
}

View file

@ -1,79 +0,0 @@
package handlers
import (
"net/http"
"github.com/TheLovinator1/FeedVault/pkg/html"
)
func IndexHandler(w http.ResponseWriter, _ *http.Request) {
content := `<h2>Feeds to archive</h2>
<p>
Input the URLs of the feeds you wish to archive below. You can add as many as needed, and access them through the website or API. Alternatively, include links to .opml files, and the feeds within will be archived.
</p>
<form action="/add" method="post">
<textarea id="urls" name="urls" rows="5" cols="50" required></textarea>
<button type="submit">Add feeds</button>
</form>
<br>
<p>You can also upload .opml files containing the feeds you wish to archive:</p>
<form enctype="multipart/form-data" method="post" action="/upload_opml">
<input type="file" name="file" id="file" accept=".opml" required>
<button type="submit">Upload OPML</button>
</form>
`
FAQ := `
<h2>FAQ</h2>
<details>
<summary>What are web feeds?</summary>
<p>
Web feeds are a way to distribute content on the web. They allow users to access updates from websites without having to visit them directly. Feeds are typically used for news websites, blogs, and other sites that frequently update content.
<br>
You can read more about web feeds on <a href="https://en.wikipedia.org/wiki/Web_feed">Wikipedia</a>.
</p>
<hr>
</details>
<details>
<summary>What is FeedVault?</summary>
<p>
FeedVault is a service that archives web feeds. It allows users to access and search for historical content from various websites. The service is designed to preserve the history of the web and provide a reliable source for accessing content that may no longer be available on the original websites.
</p>
<hr>
</details>
<details>
<summary>Why archive feeds?</summary>
<p>
Web feeds are a valuable source of information, and archiving them ensures that the content is preserved for future reference. By archiving feeds, we can ensure that historical content is available for research, analysis, and other purposes. Additionally, archiving feeds can help prevent the loss of valuable information due to website changes, outages, or other issues.
</p>
<hr>
</details>
<details>
<summary>How does it work?</summary>
<p>
FeedVault is written in Go and uses the <a href="https://github.com/mmcdole/gofeed">gofeed</a> library to parse feeds. The service periodically checks for new content in the feeds and stores it in a database. Users can access the archived feeds through the website or API.
<hr>
</details>
<details>
<summary>How can I access the archived feeds?</summary>
<p>
You can access the archived feeds through the website or API. The website provides a user interface for searching and browsing the feeds, while the API allows you to access the feeds programmatically. You can also download the feeds in various formats, such as JSON, XML, or RSS.
</p>
</details>
`
content += FAQ
htmlData := html.HTMLData{
Title: "FeedVault",
Description: "FeedVault - A feed archive",
Keywords: "RSS, Atom, Feed, Archive",
Author: "TheLovinator",
CanonicalURL: "http://localhost:8000/",
Content: content,
}
html := html.FullHTML(htmlData)
w.Write([]byte(html))
}

View file

@ -1,63 +0,0 @@
package handlers
import (
"io"
"log"
"net/http"
"github.com/TheLovinator1/FeedVault/pkg/html"
"github.com/TheLovinator1/FeedVault/pkg/models"
"github.com/TheLovinator1/FeedVault/pkg/opml"
"github.com/TheLovinator1/FeedVault/pkg/validate"
)
func UploadOpmlHandler(w http.ResponseWriter, r *http.Request) {
// Parse the form and get the file
r.ParseMultipartForm(10 << 20) // 10 MB
file, _, err := r.FormFile("file")
if err != nil {
http.Error(w, "No file provided", http.StatusBadRequest)
return
}
defer file.Close()
// Read the file
all, err := io.ReadAll(file)
if err != nil {
http.Error(w, "Failed to read file", http.StatusInternalServerError)
return
}
// Parse the OPML file
parseResult := []models.ParseResult{}
links, err := opml.ParseOpml(string(all))
if err != nil {
parseResult = append(parseResult, models.ParseResult{FeedURL: "/upload_opml", Msg: err.Error(), IsError: true})
} else {
// Add the feeds to the database
for _, feed_url := range links.XMLLinks {
log.Println("Adding feed:", feed_url)
// Validate the URL
err := validate.ValidateFeedURL(feed_url)
if err != nil {
parseResult = append(parseResult, models.ParseResult{FeedURL: feed_url, Msg: err.Error(), IsError: true})
continue
}
parseResult = append(parseResult, models.ParseResult{FeedURL: feed_url, Msg: "Added", IsError: false})
}
}
// Return the results
htmlData := html.HTMLData{
Title: "FeedVault",
Description: "FeedVault - A feed archive",
Keywords: "RSS, Atom, Feed, Archive",
Author: "TheLovinator",
CanonicalURL: "http://localhost:8000/",
Content: "<p>Feeds added.</p>",
ParseResult: parseResult,
}
html := html.FullHTML(htmlData)
w.Write([]byte(html))
}

View file

@ -1,4 +1,5 @@
package quotes package main
// "Fun" messages that will be displayed in the footer // "Fun" messages that will be displayed in the footer
var FunMsg = []string{ var FunMsg = []string{

View file

@ -1,4 +1,4 @@
package stats package main
import ( import (
"fmt" "fmt"

View file

@ -1,4 +1,4 @@
package validate package main
import ( import (
"errors" "errors"

View file

@ -2,14 +2,12 @@ package main
import ( import (
"testing" "testing"
"github.com/TheLovinator1/FeedVault/pkg/validate"
) )
// URL starts with http:// // URL starts with http://
func TestURLStartsWithHTTP(t *testing.T) { func TestURLStartsWithHTTP(t *testing.T) {
url := "http://example.com" url := "http://example.com"
err := validate.ValidateFeedURL(url) err := ValidateFeedURL(url)
if err != nil { if err != nil {
t.Errorf("Expected no error, got %v", err) t.Errorf("Expected no error, got %v", err)
} }
@ -18,7 +16,7 @@ func TestURLStartsWithHTTP(t *testing.T) {
// URL starts with https:// // URL starts with https://
func TestURLStartsWithHTTPS(t *testing.T) { func TestURLStartsWithHTTPS(t *testing.T) {
url := "https://example.com" url := "https://example.com"
err := validate.ValidateFeedURL(url) err := ValidateFeedURL(url)
if err != nil { if err != nil {
t.Errorf("Expected no error, got %v", err) t.Errorf("Expected no error, got %v", err)
} }
@ -27,7 +25,7 @@ func TestURLStartsWithHTTPS(t *testing.T) {
// URL contains a valid domain // URL contains a valid domain
func TestURLContainsValidDomain(t *testing.T) { func TestURLContainsValidDomain(t *testing.T) {
url := "http://example.com" url := "http://example.com"
err := validate.ValidateFeedURL(url) err := ValidateFeedURL(url)
if err != nil { if err != nil {
t.Errorf("Expected no error, got %v", err) t.Errorf("Expected no error, got %v", err)
} }
@ -36,7 +34,7 @@ func TestURLContainsValidDomain(t *testing.T) {
// URL is empty // URL is empty
func TestURLEmpty(t *testing.T) { func TestURLEmpty(t *testing.T) {
url := "" url := ""
err := validate.ValidateFeedURL(url) err := ValidateFeedURL(url)
if err == nil { if err == nil {
t.Error("Expected an error, got nil") t.Error("Expected an error, got nil")
} else if err.Error() != "URL must start with http:// or https://" { } else if err.Error() != "URL must start with http:// or https://" {
@ -47,7 +45,7 @@ func TestURLEmpty(t *testing.T) {
// URL does not contain a domain // URL does not contain a domain
func TestURLNotNumbers(t *testing.T) { func TestURLNotNumbers(t *testing.T) {
url := "12345" url := "12345"
err := validate.ValidateFeedURL(url) err := ValidateFeedURL(url)
if err == nil { if err == nil {
t.Error("Expected an error, got nil") t.Error("Expected an error, got nil")
} else if err.Error() != "URL must start with http:// or https://" { } else if err.Error() != "URL must start with http:// or https://" {
@ -58,7 +56,7 @@ func TestURLNotNumbers(t *testing.T) {
// URL is not a valid URL // URL is not a valid URL
func TestURLNotValidURL(t *testing.T) { func TestURLNotValidURL(t *testing.T) {
url := "example.com" url := "example.com"
err := validate.ValidateFeedURL(url) err := ValidateFeedURL(url)
if err == nil { if err == nil {
t.Error("Expected an error, got nil") t.Error("Expected an error, got nil")
} else if err.Error() != "URL must start with http:// or https://" { } else if err.Error() != "URL must start with http:// or https://" {
@ -69,7 +67,7 @@ func TestURLNotValidURL(t *testing.T) {
// Domain is resolvable // Domain is resolvable
func TestDomainIsResolvable(t *testing.T) { func TestDomainIsResolvable(t *testing.T) {
url := "http://example.com" url := "http://example.com"
err := validate.ValidateFeedURL(url) err := ValidateFeedURL(url)
if err != nil { if err != nil {
t.Errorf("Expected no error, got %v", err) t.Errorf("Expected no error, got %v", err)
} }
@ -78,7 +76,7 @@ func TestDomainIsResolvable(t *testing.T) {
// Domain does not end with .local // Domain does not end with .local
func TestDomainDoesNotEndWithLocal(t *testing.T) { func TestDomainDoesNotEndWithLocal(t *testing.T) {
url := "http://example.com" url := "http://example.com"
err := validate.ValidateFeedURL(url) err := ValidateFeedURL(url)
if err != nil { if err != nil {
t.Errorf("Expected no error, got %v", err) t.Errorf("Expected no error, got %v", err)
} }
@ -87,7 +85,7 @@ func TestDomainDoesNotEndWithLocal(t *testing.T) {
// Domain is not localhost // Domain is not localhost
func TestDomainIsNotLocalhost(t *testing.T) { func TestDomainIsNotLocalhost(t *testing.T) {
url := "http://example.com" url := "http://example.com"
err := validate.ValidateFeedURL(url) err := ValidateFeedURL(url)
if err != nil { if err != nil {
t.Errorf("Expected no error, got %v", err) t.Errorf("Expected no error, got %v", err)
} }
@ -96,7 +94,7 @@ func TestDomainIsNotLocalhost(t *testing.T) {
// Domain is not an IP address // Domain is not an IP address
func TestDomainIsNotIPAddress(t *testing.T) { func TestDomainIsNotIPAddress(t *testing.T) {
url := "http://example.com" url := "http://example.com"
err := validate.ValidateFeedURL(url) err := ValidateFeedURL(url)
if err != nil { if err != nil {
t.Errorf("Expected no error, got %v", err) t.Errorf("Expected no error, got %v", err)
} }
@ -105,7 +103,7 @@ func TestDomainIsNotIPAddress(t *testing.T) {
// URL is a file path // URL is a file path
func TestURLIsFilePath(t *testing.T) { func TestURLIsFilePath(t *testing.T) {
url := "/path/to/file" url := "/path/to/file"
err := validate.ValidateFeedURL(url) err := ValidateFeedURL(url)
if err == nil { if err == nil {
t.Error("Expected an error, got nil") t.Error("Expected an error, got nil")
} else if err.Error() != "URL must start with http:// or https://" { } else if err.Error() != "URL must start with http:// or https://" {
@ -116,7 +114,7 @@ func TestURLIsFilePath(t *testing.T) {
// URL is a relative path // URL is a relative path
func TestURLIsRelativePath(t *testing.T) { func TestURLIsRelativePath(t *testing.T) {
url := "/path/to/resource" url := "/path/to/resource"
err := validate.ValidateFeedURL(url) err := ValidateFeedURL(url)
if err == nil { if err == nil {
t.Error("Expected an error, got nil") t.Error("Expected an error, got nil")
} else if err.Error() != "URL must start with http:// or https://" { } else if err.Error() != "URL must start with http:// or https://" {
@ -127,7 +125,7 @@ func TestURLIsRelativePath(t *testing.T) {
// URL is a non-existent domain // URL is a non-existent domain
func TestNonExistentDomainURL(t *testing.T) { func TestNonExistentDomainURL(t *testing.T) {
url := "http://jfsalksajlkfsajklfsajklfllfjffffkfsklslsksassflfskjlfjlfsjkalfsaf.com" url := "http://jfsalksajlkfsajklfsajklfllfjffffkfsklslsksassflfskjlfjlfsjkalfsaf.com"
err := validate.ValidateFeedURL(url) err := ValidateFeedURL(url)
if err == nil { if err == nil {
t.Error("Expected an error, got nil") t.Error("Expected an error, got nil")
} else if err.Error() != "failed to resolve domain" { } else if err.Error() != "failed to resolve domain" {
@ -138,7 +136,7 @@ func TestNonExistentDomainURL(t *testing.T) {
// URL is a malformed URL // URL is a malformed URL
func TestMalformedURL(t *testing.T) { func TestMalformedURL(t *testing.T) {
url := "malformedurl" url := "malformedurl"
err := validate.ValidateFeedURL(url) err := ValidateFeedURL(url)
if err == nil { if err == nil {
t.Error("Expected an error, got nil") t.Error("Expected an error, got nil")
} else if err.Error() != "URL must start with http:// or https://" { } else if err.Error() != "URL must start with http:// or https://" {
@ -149,7 +147,7 @@ func TestMalformedURL(t *testing.T) {
// URL is a domain that does not support HTTP/HTTPS // URL is a domain that does not support HTTP/HTTPS
func TestURLDomainNotSupportHTTP(t *testing.T) { func TestURLDomainNotSupportHTTP(t *testing.T) {
url := "ftp://example.com" url := "ftp://example.com"
err := validate.ValidateFeedURL(url) err := ValidateFeedURL(url)
if err == nil { if err == nil {
t.Error("Expected an error, got nil") t.Error("Expected an error, got nil")
} else if err.Error() != "URL must start with http:// or https://" { } else if err.Error() != "URL must start with http:// or https://" {
@ -160,7 +158,7 @@ func TestURLDomainNotSupportHTTP(t *testing.T) {
// URL is an unreachable domain // URL is an unreachable domain
func TestUnreachableDomain(t *testing.T) { func TestUnreachableDomain(t *testing.T) {
url := "http://fafsffsfsfsfsafsasafassfs.com" url := "http://fafsffsfsfsfsafsasafassfs.com"
err := validate.ValidateFeedURL(url) err := ValidateFeedURL(url)
if err == nil { if err == nil {
t.Error("Expected an error, got nil") t.Error("Expected an error, got nil")
} else if err.Error() != "failed to resolve domain" { } else if err.Error() != "failed to resolve domain" {
@ -171,7 +169,7 @@ func TestUnreachableDomain(t *testing.T) {
// URL is an IP address // URL is an IP address
func TestURLIsIPAddress(t *testing.T) { func TestURLIsIPAddress(t *testing.T) {
url := "http://84.55.107.42" url := "http://84.55.107.42"
err := validate.ValidateFeedURL(url) err := ValidateFeedURL(url)
if err == nil { if err == nil {
t.Error("Expected an error, got nil") t.Error("Expected an error, got nil")
} else if err.Error() != "IP address URLs are not allowed" { } else if err.Error() != "IP address URLs are not allowed" {
@ -182,7 +180,7 @@ func TestURLIsIPAddress(t *testing.T) {
// URL ends with .local // URL ends with .local
func TestURLEndsWithLocal(t *testing.T) { func TestURLEndsWithLocal(t *testing.T) {
url := "http://example.local" url := "http://example.local"
err := validate.ValidateFeedURL(url) err := ValidateFeedURL(url)
if err == nil { if err == nil {
t.Error("Expected an error, got nil") t.Error("Expected an error, got nil")
} else if err.Error() != "URLs ending with .local are not allowed" { } else if err.Error() != "URLs ending with .local are not allowed" {
@ -238,7 +236,7 @@ func TestLocalURLs(t *testing.T) {
} }
for _, localURL := range localURLs { for _, localURL := range localURLs {
err := validate.ValidateFeedURL(localURL) err := ValidateFeedURL(localURL)
if err == nil { if err == nil {
t.Errorf("Expected an error for local URL %s, got nil", localURL) t.Errorf("Expected an error for local URL %s, got nil", localURL)
} else if err.Error() != "local URLs are not allowed" { } else if err.Error() != "local URLs are not allowed" {