feat: add /robots.txt (#1355)

* feat: add /robots.txt

* disallow all crawlers by default.
This commit is contained in:
oliverch
2019-09-17 05:21:08 +08:00
committed by Aaron Schlesinger
parent 800024fc6f
commit 874d27158d
7 changed files with 36 additions and 0 deletions
+8
View File
@@ -67,6 +67,14 @@ PprofPort = ":3001"
# that contains the letter `D` (for "Direct Access") in the first line.
FilterFile = ""
# The filename for the robots.txt.
# ENV override: ATHENS_ROBOTS_FILE
#
# Provides /robots.txt for web crawlers.
# By default, all crawlers are disallowed.
# For details on the file's content, see https://support.google.com/webmasters/answer/6062608
RobotsFile = "robots.txt"
# Timeout is the timeout for external network calls in seconds
# This value is used as the default for storage backends if they don't specify timeouts
# Defaults to 300