From ee8cc871963417580e8635d02768a94924399298 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Joakim=20Helle=C5=9Ben?= Date: Fri, 13 Mar 2026 01:28:49 +0100 Subject: [PATCH] Move robots.txt to static file instead of Django --- config/tests/test_urls.py | 1 - config/urls.py | 3 +-- static/robots.txt | 10 ++++++++++ twitch/tests/test_views.py | 54 -------------------------------------- twitch/views.py | 27 ------------------- 5 files changed, 11 insertions(+), 84 deletions(-) create mode 100644 static/robots.txt diff --git a/config/tests/test_urls.py b/config/tests/test_urls.py index 83738d1..8e04206 100644 --- a/config/tests/test_urls.py +++ b/config/tests/test_urls.py @@ -34,7 +34,6 @@ def _reload_urls_with(**overrides) -> ModuleType: def test_top_level_named_routes_available() -> None: """Top-level routes defined in `config.urls` are reversible.""" assert reverse("sitemap") == "/sitemap.xml" - assert reverse("robots") == "/robots.txt" # ensure the included `twitch` namespace is present assert reverse("twitch:dashboard") == "/" diff --git a/config/urls.py b/config/urls.py index 2c031ea..5e1405f 100644 --- a/config/urls.py +++ b/config/urls.py @@ -12,8 +12,7 @@ if TYPE_CHECKING: from django.urls.resolvers import URLResolver urlpatterns: list[URLPattern | URLResolver] = [ - path("sitemap.xml", twitch_views.sitemap_view, name="sitemap"), - path("robots.txt", twitch_views.robots_txt_view, name="robots"), + path(route="sitemap.xml", view=twitch_views.sitemap_view, name="sitemap"), path(route="", view=include("twitch.urls", namespace="twitch")), ] diff --git a/static/robots.txt b/static/robots.txt new file mode 100644 index 0000000..fc47f11 --- /dev/null +++ b/static/robots.txt @@ -0,0 +1,10 @@ +User-agent: * +Allow: / +Disallow: /admin/ +Disallow: /debug/ +Disallow: /datasets/ +Disallow: /docs/rss/ +Disallow: /export/ + +# Sitemap location +Sitemap: https://ttvdrops.lovinator.space/sitemap.xml diff --git a/twitch/tests/test_views.py b/twitch/tests/test_views.py index d28eca3..90b622b 100644 --- a/twitch/tests/test_views.py +++ b/twitch/tests/test_views.py @@ -1452,60 +1452,6 @@ class 
TestSitemapView: assert "" in content -@pytest.mark.django_db -class TestRobotsTxtView: - """Tests for the robots.txt view.""" - - def test_robots_txt_returns_text(self, client: Client) -> None: - """Test robots.txt view returns text content.""" - response: _MonkeyPatchedWSGIResponse = client.get("/robots.txt") - assert response.status_code == 200 - assert response["Content-Type"] in {"text/plain", "text/plain; charset=utf-8"} - - def test_robots_txt_user_agent(self, client: Client) -> None: - """Test robots.txt contains user-agent.""" - response: _MonkeyPatchedWSGIResponse = client.get("/robots.txt") - content: str = response.content.decode() - assert "User-agent: *" in content - - def test_robots_txt_allow_root(self, client: Client) -> None: - """Test robots.txt allows root path.""" - response: _MonkeyPatchedWSGIResponse = client.get("/robots.txt") - content: str = response.content.decode() - assert "Allow: /" in content - - def test_robots_txt_disallow_admin(self, client: Client) -> None: - """Test robots.txt disallows /admin/.""" - response: _MonkeyPatchedWSGIResponse = client.get("/robots.txt") - content: str = response.content.decode() - assert "Disallow: /admin/" in content - - def test_robots_txt_disallow_debug(self, client: Client) -> None: - """Test robots.txt disallows /debug/.""" - response: _MonkeyPatchedWSGIResponse = client.get("/robots.txt") - content: str = response.content.decode() - assert "Disallow: /debug/" in content - - def test_robots_txt_disallow_datasets(self, client: Client) -> None: - """Test robots.txt disallows /datasets/.""" - response: _MonkeyPatchedWSGIResponse = client.get("/robots.txt") - content: str = response.content.decode() - assert "Disallow: /datasets/" in content - - def test_robots_txt_sitemap_reference(self, client: Client) -> None: - """Test robots.txt references sitemap.""" - response: _MonkeyPatchedWSGIResponse = client.get("/robots.txt") - content: str = response.content.decode() - assert "Sitemap:" in content - 
assert "/sitemap.xml" in content - - def test_robots_txt_disallow_export(self, client: Client) -> None: - """Test robots.txt disallows /export/.""" - response: _MonkeyPatchedWSGIResponse = client.get("/robots.txt") - content: str = response.content.decode() - assert "Disallow: /export/" in content - - @pytest.mark.django_db class TestSEOPaginationLinks: """Tests for SEO pagination links in views.""" diff --git a/twitch/views.py b/twitch/views.py index 17904d1..2e6d8e7 100644 --- a/twitch/views.py +++ b/twitch/views.py @@ -2753,30 +2753,3 @@ def sitemap_view(request: HttpRequest) -> HttpResponse: # noqa: PLR0915 xml_content += "" return HttpResponse(xml_content, content_type="application/xml") - - -# MARK: /robots.txt -def robots_txt_view(request: HttpRequest) -> HttpResponse: - """Generate robots.txt for search engine crawlers. - - Args: - request: The HTTP request. - - Returns: - HttpResponse: robots.txt content. - """ - base_url: str = f"{request.scheme}://{request.get_host()}" - - robots_content: str = f"""User-agent: * -Allow: / -Disallow: /admin/ -Disallow: /debug/ -Disallow: /datasets/ -Disallow: /docs/rss/ -Disallow: /export/ - -# Sitemap location -Sitemap: {base_url}/sitemap.xml -""" - - return HttpResponse(robots_content, content_type="text/plain")