Move robots.txt to a static file instead of serving it from a Django view

This commit is contained in:
Joakim Hellsén 2026-03-13 01:28:49 +01:00
commit ee8cc87196
Signed by: Joakim Hellsén
SSH key fingerprint: SHA256:/9h/CsExpFp+PRhsfA0xznFx2CGfTT5R/kpuFfUgEQk
5 changed files with 6 additions and 84 deletions

View file

@@ -1452,60 +1452,6 @@ class TestSitemapView:
assert "<lastmod>" in content
@pytest.mark.django_db
class TestRobotsTxtView:
    """Tests for the robots.txt view."""

    @staticmethod
    def _fetch(client: Client) -> _MonkeyPatchedWSGIResponse:
        """Issue a GET request for /robots.txt and return the response."""
        return client.get("/robots.txt")

    def test_robots_txt_returns_text(self, client: Client) -> None:
        """Test robots.txt view returns text content."""
        response = self._fetch(client)
        assert response.status_code == 200
        assert response["Content-Type"] in {"text/plain", "text/plain; charset=utf-8"}

    def test_robots_txt_user_agent(self, client: Client) -> None:
        """Test robots.txt contains user-agent."""
        body: str = self._fetch(client).content.decode()
        assert "User-agent: *" in body

    def test_robots_txt_allow_root(self, client: Client) -> None:
        """Test robots.txt allows root path."""
        body: str = self._fetch(client).content.decode()
        assert "Allow: /" in body

    def test_robots_txt_disallow_admin(self, client: Client) -> None:
        """Test robots.txt disallows /admin/."""
        body: str = self._fetch(client).content.decode()
        assert "Disallow: /admin/" in body

    def test_robots_txt_disallow_debug(self, client: Client) -> None:
        """Test robots.txt disallows /debug/."""
        body: str = self._fetch(client).content.decode()
        assert "Disallow: /debug/" in body

    def test_robots_txt_disallow_datasets(self, client: Client) -> None:
        """Test robots.txt disallows /datasets/."""
        body: str = self._fetch(client).content.decode()
        assert "Disallow: /datasets/" in body

    def test_robots_txt_sitemap_reference(self, client: Client) -> None:
        """Test robots.txt references sitemap."""
        body: str = self._fetch(client).content.decode()
        assert "Sitemap:" in body
        assert "/sitemap.xml" in body

    def test_robots_txt_disallow_export(self, client: Client) -> None:
        """Test robots.txt disallows /export/."""
        body: str = self._fetch(client).content.decode()
        assert "Disallow: /export/" in body
@pytest.mark.django_db
class TestSEOPaginationLinks:
"""Tests for SEO pagination links in views."""

View file

@@ -2753,30 +2753,3 @@ def sitemap_view(request: HttpRequest) -> HttpResponse:  # noqa: PLR0915
xml_content += "</urlset>"
return HttpResponse(xml_content, content_type="application/xml")
# MARK: /robots.txt
def robots_txt_view(request: HttpRequest) -> HttpResponse:
    """Generate robots.txt for search engine crawlers.

    Args:
        request: The HTTP request.

    Returns:
        HttpResponse: robots.txt content.
    """
    origin: str = f"{request.scheme}://{request.get_host()}"
    directives: tuple[str, ...] = (
        "User-agent: *",
        "Allow: /",
        "Disallow: /admin/",
        "Disallow: /debug/",
        "Disallow: /datasets/",
        "Disallow: /docs/rss/",
        "Disallow: /export/",
        "# Sitemap location",
        f"Sitemap: {origin}/sitemap.xml",
    )
    # Trailing "\n" reproduces the final newline of the original
    # triple-quoted literal byte-for-byte.
    return HttpResponse("\n".join(directives) + "\n", content_type="text/plain")