Block AI bots
commit 2639d6eb1c
parent 43503921db

3 changed files with 16 additions and 5 deletions

robots.txt template:

@@ -1,8 +1,7 @@
 User-agent: *
-{% if SEO_INDEX %}
-Allow: /
-{% else %}
-Disallow: /
-{% endif %}
+{% if SEO_INDEX %}Allow: /{% else %}Disallow: /{% endif %}
+
+# https://github.com/ai-robots-txt/ai.robots.txt
+{{ ai_robots_txt }}
 
 Sitemap: {{ sitemap }}
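
With SEO_INDEX set, the template now renders an Allow rule for regular crawlers and appends the upstream ai.robots.txt block list below it. An abridged, illustrative rendering (the bot list is fetched from the upstream project and changes over time; the sitemap URL is a placeholder):

    User-agent: *
    Allow: /

    # https://github.com/ai-robots-txt/ai.robots.txt
    User-agent: GPTBot
    User-agent: CCBot
    User-agent: ClaudeBot
    Disallow: /

    Sitemap: https://example.com/sitemap.xml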

utils:

@@ -112,3 +112,13 @@ def get_or_none(queryset: models.QuerySet) -> models.Model:
         return queryset.get()
     except (queryset.model.DoesNotExist, queryset.model.MultipleObjectsReturned):
         return None
+
+
+@django_cache_decorator(time=21600)
+def get_ai_robots_txt() -> str:
+    """
+    https://github.com/ai-robots-txt/ai.robots.txt
+    """
+    return requests_session.get(
+        "https://raw.githubusercontent.com/ai-robots-txt/ai.robots.txt/main/robots.txt"
+    ).content.decode()
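
django_cache_decorator and requests_session are existing project helpers that do not appear in this diff. A minimal self-contained sketch of the same behaviour, assuming Django's low-level cache API and a plain requests.Session (the cache key and the 10-second timeout are assumptions):

    import requests
    from django.core.cache import cache

    # Stand-ins for the project's shared helpers (assumed; not shown in this diff).
    requests_session = requests.Session()
    AI_ROBOTS_URL = (
        "https://raw.githubusercontent.com/ai-robots-txt/ai.robots.txt/main/robots.txt"
    )


    def get_ai_robots_txt() -> str:
        """Fetch the ai.robots.txt block list, cached for six hours (21600 seconds)."""
        text = cache.get("ai_robots_txt")
        if text is None:
            text = requests_session.get(AI_ROBOTS_URL, timeout=10).content.decode()
            cache.set("ai_robots_txt", text, timeout=21600)
        return text

Without caching, every request to /robots.txt would trigger an outbound HTTP call to GitHub; the six-hour TTL mirrors the time=21600 argument on the decorator.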

views:

@@ -23,6 +23,7 @@ from website.search.models import SearchPage
 
 from .feed_generators import CustomFeed
 from .models import BaseListingPage, BasePage
+from .utils import get_ai_robots_txt
 
 
 class Error404View(TemplateView):
@@ -52,6 +53,7 @@ class RobotsView(TemplateView):
     def get_context_data(self, **kwargs: dict) -> dict:
         context = super().get_context_data(**kwargs)
         context["sitemap"] = self.request.build_absolute_uri(reverse("sitemap"))
+        context["ai_robots_txt"] = get_ai_robots_txt()
         return context
 
 
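
Put together, the view now passes both the sitemap URL and the fetched block list into the template context. A hedged sketch of the full view after this change (template_name and content_type are assumptions, not shown in the diff):

    from django.urls import reverse
    from django.views.generic import TemplateView

    from .utils import get_ai_robots_txt


    class RobotsView(TemplateView):
        template_name = "robots.txt"  # assumed template path
        content_type = "text/plain"   # robots.txt needs a plain-text response

        def get_context_data(self, **kwargs: dict) -> dict:
            context = super().get_context_data(**kwargs)
            context["sitemap"] = self.request.build_absolute_uri(reverse("sitemap"))
            context["ai_robots_txt"] = get_ai_robots_txt()
            return context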