@cnk
Created June 22, 2022
from django.db import models


class RobotsTxtMixin(models.Model):
    """
    Always mix this class in BEFORE wagtailcore.Page. Otherwise, its override
    of get_sitemap_urls() won't be called.
    """
    hide_from_search_engines = models.BooleanField(
        default=False,
        help_text="Prevent this page from being indexed by search engines. "
                  "You must enable this setting before you publish the page "
                  "for it to have the desired effect."
    )

    class Meta:
        abstract = True

    def get_sitemap_urls(self, request=None):
        """
        Exclude from the XML sitemap all Pages which have
        hide_from_search_engines enabled.
        """
        if self.hide_from_search_engines:
            return []
        # Every class that derives from RobotsTxtMixin also derives from Page,
        # which has get_sitemap_urls().
        # noinspection PyUnresolvedReferences
        return super().get_sitemap_urls(request)
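
Excluding a page from the sitemap does not stop a crawler that reaches it by other means, so the page's template should also emit a noindex robots meta tag whenever the flag is set: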
<head>
    {% if self.hide_from_search_engines %}
        <meta name="robots" content="noindex">
    {% endif %}
</head>
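
A minimal usage sketch (the ArticlePage name is an assumption for illustration, not part of the gist). As the docstring says, the mixin must come before Page in the base-class list so that Python's MRO resolves to the mixin's get_sitemap_urls() override:

from wagtail.models import Page  # "from wagtail.core.models import Page" on Wagtail < 3.0

# Hypothetical page type, not part of the gist: RobotsTxtMixin is listed first
# so its get_sitemap_urls() override takes precedence over Page's.
class ArticlePage(RobotsTxtMixin, Page):
    pass

Because the mixin adds a database column, each concrete page type that adopts it needs a schema migration (makemigrations / migrate) before the checkbox appears in the Wagtail admin.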