commit 7b37090a8e17f93ab5ab043a2aa0fe717870cc6e
parent 759c0493b08f7675a2943be9d93cb8b066bff71c
Author: René Wagner <rwagner@rw-net.de>
Date: Thu, 28 Jan 2021 11:33:45 +0100
add "/robots.txt" route to views.py
It's a hard-coded approach to serve a robots.txt to other crawlers.
No crawler may access /add-seed and /threads, and the relevant virtual
agents (researcher, indexer, archiver) may not access /search and
/backlinks.
Signed-off-by: Natalie Pendragon <natpen@natpen.net>
Diffstat:
1 file changed, 4 insertions(+), 0 deletions(-)
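For reference, the body returned by the new route (a single escaped
string in the diff below) renders as this robots.txt:

User-agent: researcher
User-agent: indexer
User-agent: archiver
Disallow: /search
Disallow: /backlinks

User-agent: *
Disallow: /add-seed
Disallow: /threads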
diff --git a/serve/views.py b/serve/views.py
@@ -57,6 +57,10 @@ gus = GUS()
def status(request):
return Response(Status.SUCCESS, "text/plain", "ok")
+@app.route("/robots.txt", strict_trailing_slash=False)
+def robots(request):
+    return Response(Status.SUCCESS, "text/plain",
+                    "User-agent: researcher\nUser-agent: indexer\nUser-agent: archiver\nDisallow: /search\nDisallow: /backlinks\n\nUser-agent: *\nDisallow: /add-seed\nDisallow: /threads")
@app.route("/favicon.txt", strict_trailing_slash=False)
def favicon(request):
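
Not part of the commit, but a quick way to sanity-check the rules above:
Python's standard urllib.robotparser can parse the served body directly.
The ROBOTS_TXT constant repeats the string from the diff; the agent name
"somebot" is just an illustrative stand-in for any other crawler.

import urllib.robotparser

# The exact body served by the new /robots.txt route.
ROBOTS_TXT = (
    "User-agent: researcher\nUser-agent: indexer\nUser-agent: archiver\n"
    "Disallow: /search\nDisallow: /backlinks\n\n"
    "User-agent: *\nDisallow: /add-seed\nDisallow: /threads"
)

parser = urllib.robotparser.RobotFileParser()
parser.modified()  # mark rules as loaded; can_fetch() treats unread rules as "deny all"
parser.parse(ROBOTS_TXT.splitlines())

# The three named virtual agents are barred from /search and /backlinks...
assert not parser.can_fetch("indexer", "/search")
assert not parser.can_fetch("archiver", "/backlinks")
# ...but may still fetch everything else.
assert parser.can_fetch("researcher", "/")

# Any other crawler falls through to the catch-all group.
assert not parser.can_fetch("somebot", "/add-seed")
assert not parser.can_fetch("somebot", "/threads")
assert parser.can_fetch("somebot", "/search")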