author    Christian Grothoff <christian@grothoff.org>    2014-11-19 17:55:44 +0100
committer Christian Grothoff <christian@grothoff.org>    2014-11-19 17:55:44 +0100
commit    c456fe4ce93d8ad3cc7182944ecaf54c5dac39cb (patch)
tree      e6ae887659149126c9e097656a296c3dd27489a4
parent    648f103f5d7279e4298bb98f38314c8bb8e073a9 (diff)
download  www-c456fe4ce93d8ad3cc7182944ecaf54c5dac39cb.tar.gz
          www-c456fe4ce93d8ad3cc7182944ecaf54c5dac39cb.tar.bz2
          www-c456fe4ce93d8ad3cc7182944ecaf54c5dac39cb.zip
robots
-rw-r--r--  robots.txt | 20
1 file changed, 20 insertions, 0 deletions
diff --git a/robots.txt b/robots.txt
new file mode 100644
index 00000000..0a639917
--- /dev/null
+++ b/robots.txt
@@ -0,0 +1,20 @@
+#
+# robots.txt
+#
+# This file is to prevent the crawling and indexing of certain parts
+# of your site by web crawlers and spiders run by sites like Yahoo!
+# and Google. By telling these "robots" where not to go on your site,
+# you save bandwidth and server resources.
+#
+# This file will be ignored unless it is at the root of your host:
+# Used: http://example.com/robots.txt
+# Ignored: http://example.com/site/robots.txt
+#
+# For more information about the robots.txt standard, see:
+# http://www.robotstxt.org/robotstxt.html
+#
+# For syntax checking, see:
+# http://www.frobee.com/robots-txt-check
+
+User-agent: *
+Crawl-delay: 10
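
A minimal sketch of how one might sanity-check the committed rules with
Python's standard urllib.robotparser; the rules string is copied from the
diff above, and nothing else here is part of the commit:

from urllib import robotparser

# Parse the committed rules directly (no HTTP fetch needed for this check).
rules = """\
User-agent: *
Crawl-delay: 10
"""

rp = robotparser.RobotFileParser()
rp.parse(rules.splitlines())

# No Disallow lines, so every path stays fetchable for every crawler,
# but polite crawlers are asked to wait 10 seconds between requests.
print(rp.can_fetch("*", "/any/page"))  # True
print(rp.crawl_delay("*"))             # 10 (crawl_delay requires Python 3.6+)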