summaryrefslogtreecommitdiffstats
path: root/python/fatcat_web/static
diff options
context:
space:
mode:
authorbnewbold <bnewbold@archive.org>2020-08-20 21:17:59 +0000
committerbnewbold <bnewbold@archive.org>2020-08-20 21:17:59 +0000
commitdaf91b137483b7345448b597289c78f8fb3f9969 (patch)
tree712c27d902235d8d007763b512c57eaecd8045ad /python/fatcat_web/static
parent5007ee299ce07b31db6d48cd4ab2587f87af53ab (diff)
parent2a98d10be1cc1368f9510745bff07c343974d4a7 (diff)
downloadfatcat-daf91b137483b7345448b597289c78f8fb3f9969.tar.gz
fatcat-daf91b137483b7345448b597289c78f8fb3f9969.zip
Merge branch 'bnewbold-sitemap' into 'master'
basic sitemap setup. See merge request webgroup/fatcat!79
Diffstat (limited to 'python/fatcat_web/static')
-rw-r--r--python/fatcat_web/static/robots.deny_all.txt7
-rw-r--r--python/fatcat_web/static/robots.txt19
-rw-r--r--python/fatcat_web/static/sitemap.xml13
3 files changed, 39 insertions, 0 deletions
diff --git a/python/fatcat_web/static/robots.deny_all.txt b/python/fatcat_web/static/robots.deny_all.txt
new file mode 100644
index 00000000..b88274b1
--- /dev/null
+++ b/python/fatcat_web/static/robots.deny_all.txt
@@ -0,0 +1,7 @@
+# Hello friends!
+
+# You have found a QA/development instance of the Fatcat catalog. The canonical
+# location is https://fatcat.wiki, please crawl and index that location instead.
+
+User-agent: *
+Disallow: /
diff --git a/python/fatcat_web/static/robots.txt b/python/fatcat_web/static/robots.txt
index a168f11b..e89af36e 100644
--- a/python/fatcat_web/static/robots.txt
+++ b/python/fatcat_web/static/robots.txt
@@ -1 +1,20 @@
# Hello friends!
+# If you are considering large or automated crawling, you may want to look at
+# our API (https://api.fatcat.wiki) or bulk database snapshots instead.
+
+# By default, crawlers may crawl anything on this domain. HTTP 429 ("backoff") status
+# codes are used for rate-limiting instead of any crawl delay specified here.
+# Up to a handful of concurrent requests should be fine.
+User-agent: *
+Allow: /
+
+# Crawling search result pages is expensive, so we specify a crawl delay for those
+User-agent: *
+Allow: /release/search
+Allow: /container/search
+Allow: /coverage/search
+Crawl-delay: 5
+
+Sitemap: https://fatcat.wiki/sitemap.xml
+Sitemap: https://fatcat.wiki/sitemap-index-releases.xml
+Sitemap: https://fatcat.wiki/sitemap-index-containers.xml
diff --git a/python/fatcat_web/static/sitemap.xml b/python/fatcat_web/static/sitemap.xml
new file mode 100644
index 00000000..e6189aa4
--- /dev/null
+++ b/python/fatcat_web/static/sitemap.xml
@@ -0,0 +1,13 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">
+ <!-- basic site pages -->
+ <url><loc>https://fatcat.wiki/</loc></url>
+ <url><loc>https://fatcat.wiki/about</loc></url>
+ <url><loc>https://fatcat.wiki/rfc</loc></url>
+ <url><loc>https://fatcat.wiki/stats</loc></url>
+ <url><loc>https://fatcat.wiki/changelog</loc></url>
+ <url><loc>https://fatcat.wiki/release/lookup</loc></url>
+ <url><loc>https://fatcat.wiki/container/lookup</loc></url>
+ <url><loc>https://fatcat.wiki/file/lookup</loc></url>
+ <!-- additional entity-level URL lists are linked from robots.txt -->
+</urlset>