[3.4] Disallow crawling of versioned documentation in robots.txt (#5549) (#5725)

(cherry picked from commit e33985f969)

Co-authored-by: jack1142 <6032823+jack1142@users.noreply.github.com>

Author:     Red-GitHubBot
Date:       2022-06-02 12:46:15 +02:00
Committed:  GitHub
Parent:     dce0b713fe
Commit:     a3a6e5cbaf

2 changed files with 12 additions and 0 deletions

docs/_html/robots.txt (new file)

@@ -0,0 +1,6 @@
+User-agent: *
+Disallow: /
+Allow: /en/stable
+Allow: /en/latest
+
+Sitemap: https://docs.discord.red/sitemap.xml
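These rules rely on the longest-match semantics of RFC 9309, which the major crawlers follow: Disallow: / blocks every path by default, and the two longer Allow rules override it for /en/stable and /en/latest, so versioned trees such as /en/3.4 drop out of search results. Below is a minimal sketch of that resolution logic in Python; it is simplified (no wildcards or percent-encoding), and the allowed() helper and sample paths are illustrative, not part of the commit.

from typing import Optional, Tuple

RULES = [
    ("Disallow", "/"),
    ("Allow", "/en/stable"),
    ("Allow", "/en/latest"),
]

def allowed(path: str) -> bool:
    # RFC 9309: the most specific (longest) matching rule wins;
    # on a tie, Allow beats Disallow. No matching rule means allowed.
    best: Optional[Tuple[str, str]] = None
    for directive, pattern in RULES:
        if path.startswith(pattern):
            if (best is None
                    or len(pattern) > len(best[1])
                    or (len(pattern) == len(best[1]) and directive == "Allow")):
                best = (directive, pattern)
    return best is None or best[0] == "Allow"

assert allowed("/en/stable/index.html")    # whitelisted release docs
assert not allowed("/en/3.4/index.html")   # versioned docs stay unindexed

Note that Python's stdlib urllib.robotparser applies rules in file order (first match wins) rather than by length, so it would report both paths as disallowed here; the longest-match behaviour sketched above is what search engines actually apply to this file.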

docs/conf.py

@@ -117,6 +117,12 @@ rst_prolog += f"\n.. |DPY_VERSION| replace:: {dpy_version}"
 #
 html_theme = "sphinx_rtd_theme"
 
+# Add any extra paths that contain custom files (such as robots.txt or
+# .htaccess) here, relative to this directory. These files are copied
+# directly to the root of the documentation.
+#
+html_extra_path = ["_html"]
+
 # Theme options are theme-specific and customize the look and feel of a theme
 # further. For a list of options available for each theme, see the
 # documentation.
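html_extra_path copies the contents of the listed directories verbatim into the root of the built HTML, so robots.txt lands next to index.html, where crawlers request it. Below is a quick way to check that after a local build, sketched under the assumption that the docs source lives in docs/ and builds into docs/_build/html (both paths illustrative):

from pathlib import Path

from sphinx.cmd.build import build_main

# Equivalent to: sphinx-build -b html docs docs/_build/html
exit_code = build_main(["-b", "html", "docs", "docs/_build/html"])
assert exit_code == 0

# html_extra_path should have copied the file to the output root.
robots = Path("docs/_build/html/robots.txt")
assert robots.is_file()
print(robots.read_text())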