Add robots.txt

Jake Howard 2022-08-19 14:35:38 +01:00
parent 24d92f4760
commit 1abc6da19b
Signed by: jake
GPG Key ID: 57AFB45680EDD477
4 changed files with 28 additions and 3 deletions

robots.txt template (new file)

@@ -0,0 +1,5 @@
+User-agent: *
+Allow: /
+Disallow: {% url "wagtailadmin_home" %}
+
+Sitemap: {{ sitemap }}
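
For reference, the template renders to plain text. Assuming the site is served at https://example.com (a placeholder host), with the Wagtail admin mounted at admin/ as the URLconf change below confirms, the rendered output would be:

User-agent: *
Allow: /
Disallow: /admin/

Sitemap: https://example.com/sitemap.xml

Since Disallow matches by path prefix, everything under the admin is excluded while the rest of the site stays crawlable.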

common app tests

@@ -1,4 +1,4 @@
-from django.test import TestCase
+from django.test import SimpleTestCase, TestCase
 from django.urls import reverse
@@ -12,3 +12,11 @@ class Error404PageTestCase(TestCase):
     def test_queries(self) -> None:
         with self.assertNumQueries(10):
             self.client.get(self.url)
+
+
+class RobotsViewTestCase(SimpleTestCase):
+    url = reverse("robotstxt")
+
+    def test_accessible(self) -> None:
+        response = self.client.get(self.url)
+        self.assertEqual(response.status_code, 200)
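
SimpleTestCase is the right base class here: the view performs no database queries, so the test runs without any database setup. As a sketch (not part of this commit), a companion test could also pin down the declared content type:

from django.test import SimpleTestCase
from django.urls import reverse


class RobotsContentTypeTestCase(SimpleTestCase):
    # Hypothetical companion test, mirroring RobotsViewTestCase above.
    url = reverse("robotstxt")

    def test_content_type(self) -> None:
        response = self.client.get(self.url)
        # RobotsView sets content_type = "text/plain", and Django uses
        # that value verbatim for the Content-Type header.
        self.assertEqual(response["Content-Type"], "text/plain")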

website/common/views.py

@@ -1,6 +1,7 @@
 from typing import Any

 from django.http.response import HttpResponse
+from django.urls import reverse
 from django.views.defaults import ERROR_404_TEMPLATE_NAME
 from django.views.generic import TemplateView

@@ -21,3 +22,13 @@ class Error404View(TemplateView):


 page_not_found = Error404View.as_view()
+
+
+class RobotsView(TemplateView):
+    template_name = "robots.txt"
+    content_type = "text/plain"
+
+    def get_context_data(self, **kwargs: dict) -> dict:
+        context = super().get_context_data(**kwargs)
+        context["sitemap"] = self.request.build_absolute_uri(reverse("sitemap"))
+        return context
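
The important detail is build_absolute_uri: reverse("sitemap") returns only a path, but the Sitemap directive in robots.txt must be an absolute URL, so the view prefixes the scheme and host of the current request. A minimal illustration with a fabricated request, where example.com is a placeholder:

from django.test import RequestFactory
from django.urls import reverse

# Fabricated request, showing how the absolute sitemap URL is built;
# assumes "example.com" is listed in ALLOWED_HOSTS.
request = RequestFactory().get("/robots.txt", secure=True, HTTP_HOST="example.com")

path = reverse("sitemap")               # "/sitemap.xml"
url = request.build_absolute_uri(path)  # "https://example.com/sitemap.xml"

Because the host comes from the request, the same template works in every environment without hard-coding a domain.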

project URLconf

@@ -6,7 +6,7 @@ from wagtail.contrib.sitemaps.views import sitemap
 from wagtail.documents import urls as wagtaildocs_urls
 from wagtail.images.views.serve import ServeView

-from website.common.views import page_not_found
+from website.common.views import RobotsView, page_not_found

 urlpatterns = [
     path("admin/", include(wagtailadmin_urls)),
@@ -20,7 +20,8 @@ urlpatterns = [
         ServeView.as_view(action="redirect"),
         name="wagtailimages_serve",
     ),
-    path("sitemap.xml", sitemap),
+    path("sitemap.xml", sitemap, name="sitemap"),
+    path("robots.txt", RobotsView.as_view(), name="robotstxt"),
     path("404/", page_not_found, name="404"),
 ]
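
Naming the sitemap route is what lets RobotsView call reverse("sitemap"); before this change the route had no name. Both endpoints are now resolvable by name:

from django.urls import reverse

reverse("sitemap")    # returns "/sitemap.xml"
reverse("robotstxt")  # returns "/robots.txt"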