From b8b349b39558af3ea4d996dc1001f8f8db15e83b Mon Sep 17 00:00:00 2001
From: Benjamin Kiah Stroud <32469930+bkiahstroud@users.noreply.github.com>
Date: Thu, 27 Jun 2024 13:39:24 -0700
Subject: [PATCH] block all user agents from entire site

This is meant to be a temporary change to prevent the site from being
indexed by search engines until content has been ingested
---
 api/public/robots.txt | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/api/public/robots.txt b/api/public/robots.txt
index 3c9c7c01f3..f6ca098a03 100644
--- a/api/public/robots.txt
+++ b/api/public/robots.txt
@@ -1,5 +1,5 @@
 # See http://www.robotstxt.org/robotstxt.html for documentation on how to use the robots.txt file
 #
 # To ban all spiders from the entire site uncomment the next two lines:
-# User-agent: *
-# Disallow: /
+User-agent: *
+Disallow: /