author    Bryan Newbold <bnewbold@archive.org>  2022-10-30 13:50:21 -0700
committer Bryan Newbold <bnewbold@archive.org>  2022-10-30 13:50:23 -0700
commit    9da8c9bdb9149f3701f8774c1a220de889e7cc59 (patch)
tree      19780c33290c393f21127a945350f7a8fb86ae1f /fatcat_scholar/static
parent    1fc76063056c87da3b769df45f4f0a7340536f9c (diff)
robots: block yacybot from search page
A couple of crawlers have been causing load.
Diffstat (limited to 'fatcat_scholar/static')
-rw-r--r--  fatcat_scholar/static/robots.allow.txt  1
1 file changed, 1 insertion, 0 deletions
diff --git a/fatcat_scholar/static/robots.allow.txt b/fatcat_scholar/static/robots.allow.txt
index d63fcb9..6076e75 100644
--- a/fatcat_scholar/static/robots.allow.txt
+++ b/fatcat_scholar/static/robots.allow.txt
@@ -8,6 +8,7 @@ User-agent: YandexBot
User-agent: bingbot
User-agent: Googlebot
User-agent: SemanticScholarBot
+User-agent: yacybot
Disallow: /search
# crawling search result pages is expensive, so we do specify a long crawl
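In robots.txt syntax, consecutive User-agent lines form a single group that shares the directives following them, so adding yacybot here subjects it to the same Disallow: /search rule (and any crawl delay) already applied to the other listed crawlers. A minimal sketch of what the updated stanza might look like after this change, assuming a Crawl-delay directive follows the truncated comment above; the delay value is an assumption for illustration, not taken from this diff:

    User-agent: YandexBot
    User-agent: bingbot
    User-agent: Googlebot
    User-agent: SemanticScholarBot
    User-agent: yacybot
    Disallow: /search
    # crawling search result pages is expensive, so we do specify a long crawl delay
    # (the value below is assumed, not from this commit)
    Crawl-delay: 30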