Add a way to disable SEO indexing

Well, "disable"
Jake Howard 2022-08-19 15:31:27 +01:00
parent cd4252cd64
commit af6449c9f8
Signed by: jake
GPG key ID: 57AFB45680EDD477
5 changed files with 28 additions and 1 deletion

@@ -13,6 +13,9 @@
 </title>
 <meta name="description" content="" />
 <meta name="viewport" content="width=device-width, initial-scale=1" />
+{% if not SEO_INDEX %}
+<meta name="robots" content="noindex" />
+{% endif %}
 {% block extra_head %}{% endblock %}
 <link rel="stylesheet" type="text/css" href="{% static 'css/base.css' %}">
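With the context processor added later in this commit, any page extending this base template now emits the noindex hint whenever SEO_INDEX is off. A minimal sketch of how that could be asserted (hypothetical test; assumes some page at "/" extends the base template):

    from django.test import SimpleTestCase, override_settings

    class NoindexMetaTagTests(SimpleTestCase):
        @override_settings(SEO_INDEX=False)
        def test_noindex_rendered(self) -> None:
            # Any page built on the base template should now carry the hint
            response = self.client.get("/")
            self.assertContains(
                response, '<meta name="robots" content="noindex" />', html=True
            )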

@@ -1,5 +1,10 @@
+{% if SEO_INDEX %}
 User-agent: *
 Allow: /
 Disallow: {% url "wagtailadmin_home" %}
+{% else %}
+User-agent: *
+Disallow: /
+{% endif %}
 
 Sitemap: {{ sitemap }}
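The view behind the "robotstxt" URL is not part of this diff, but judging from the tests below (a 200 response and a sitemap context variable holding an absolute URL), it might look roughly like this sketch; the class name and template path are assumptions:

    from django.views.generic import TemplateView

    class RobotsView(TemplateView):
        template_name = "robots.txt"
        content_type = "text/plain"

        def get_context_data(self, **kwargs: object) -> dict:
            context = super().get_context_data(**kwargs)
            # Absolute sitemap URL; "http://testserver/sitemap.xml" under the test client
            context["sitemap"] = self.request.build_absolute_uri("/sitemap.xml")
            return context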

@@ -1,4 +1,4 @@
-from django.test import SimpleTestCase, TestCase
+from django.test import SimpleTestCase, TestCase, override_settings
 from django.urls import reverse
@@ -23,10 +23,20 @@ class Error404PageTestCase(TestCase):
 class RobotsViewTestCase(SimpleTestCase):
     url = reverse("robotstxt")
 
+    @override_settings(SEO_INDEX=True)
     def test_accessible(self) -> None:
         response = self.client.get(self.url)
         self.assertEqual(response.status_code, 200)
         self.assertEqual(response.context["sitemap"], "http://testserver/sitemap.xml")
+        self.assertContains(response, "Allow: /")
+        self.assertTrue(response.context["SEO_INDEX"])
+
+    @override_settings(SEO_INDEX=False)
+    def test_disallow(self) -> None:
+        response = self.client.get(self.url)
+        self.assertEqual(response.status_code, 200)
+        self.assertContains(response, "Disallow: /")
+        self.assertFalse(response.context["SEO_INDEX"])
 
 
 class SecurityViewTestCase(TestCase):
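override_settings is standard Django test machinery: it pins a setting for the duration of the decorated test and restores the previous value afterwards. It also works as a context manager when one test needs both values (sketch; the test class is hypothetical):

    from django.conf import settings
    from django.test import SimpleTestCase, override_settings

    class SeoIndexToggleTests(SimpleTestCase):
        def test_toggle(self) -> None:
            with override_settings(SEO_INDEX=True):
                self.assertTrue(settings.SEO_INDEX)
            # The previous value is restored on exiting the block
            with override_settings(SEO_INDEX=False):
                self.assertFalse(settings.SEO_INDEX)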

@@ -10,6 +10,7 @@ env = environ.Env(
     BASE_HOSTNAME=(str, "example.com"),
     UNSPLASH_CLIENT_ID=(str, ""),
     SPOTIFY_PROXY_HOST=(str, ""),
+    SEO_INDEX=(bool, False),
 )
 
 # Read local secrets
@@ -89,6 +90,7 @@ TEMPLATES = [
                 "django.template.context_processors.request",
                 "django.contrib.auth.context_processors.auth",
                 "django.contrib.messages.context_processors.messages",
+                "website.utils.context_processors.global_vars",
             ],
         },
     },
@@ -183,6 +185,7 @@ WAGTAILEMBEDS_FINDERS = [
 UNSPLASH_CLIENT_ID = env("UNSPLASH_CLIENT_ID")
 SPOTIFY_PROXY_HOST = env("SPOTIFY_PROXY_HOST")
+SEO_INDEX = env("SEO_INDEX")
 
 if DEBUG:
     # Add django-browser-reload
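With django-environ, the (bool, False) entry both types the variable and sets its default, so indexing is opt-in: unset means False, and truthy strings in the environment (such as "true", "on" or "1") enable it. A standalone sketch of that behaviour, mirroring the settings above:

    import os
    import environ

    env = environ.Env(SEO_INDEX=(bool, False))
    assert env("SEO_INDEX") is False  # unset -> the declared default applies

    os.environ["SEO_INDEX"] = "true"
    assert env("SEO_INDEX") is True  # "true" is parsed as boolean True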

@@ -0,0 +1,6 @@
+from django.conf import settings
+from django.http.request import HttpRequest
+
+
+def global_vars(request: HttpRequest) -> dict:
+    return {"SEO_INDEX": settings.SEO_INDEX}