taler-docs

Documentation for GNU Taler components, APIs and protocols

commit bba5907f359908d0cb01ef48a7f994eeb93ce081
parent 0c8a2b29e26e364fa7048b3607f234cf66f04b37
Author: Marcello Stanisci <marcello.stanisci@inria.fr>
Date:   Mon, 20 Jun 2016 18:18:24 +0200

using /var/www/favicon_robots in "api"

Diffstat:
M Makefile    |  2 --
D favicon.ico |  0
D robots.txt  | 20 --------------------
3 files changed, 0 insertions(+), 22 deletions(-)

diff --git a/Makefile b/Makefile
@@ -53,8 +53,6 @@ clean:
 # remove all cached state first.
 html:
 	$(SPHINXBUILD) -b html-linked $(ALLSPHINXOPTS) $(BUILDDIR)/html
-	@cp favicon.ico $(BUILDDIR)/html
-	@cp robots.txt $(BUILDDIR)/html
 	@echo
 	@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
diff --git a/favicon.ico b/favicon.ico
Binary files differ.
diff --git a/robots.txt b/robots.txt
@@ -1,20 +0,0 @@
-#
-# robots.txt
-#
-# This file is to prevent the crawling and indexing of certain parts
-# of your site by web crawlers and spiders run by sites like Yahoo!
-# and Google. By telling these "robots" where not to go on your site,
-# you save bandwidth and server resources.
-#
-# This file will be ignored unless it is at the root of your host:
-# Used:    http://example.com/robots.txt
-# Ignored: http://example.com/site/robots.txt
-#
-# For more information about the robots.txt standard, see:
-# http://www.robotstxt.org/robotstxt.html
-#
-# For syntax checking, see:
-# http://www.frobee.com/robots-txt-check
-
-User-agent: *
-Crawl-delay: 10
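
Note: per the commit message, favicon.ico and robots.txt are no longer copied into the Sphinx build output; they are instead expected to be served from /var/www/favicon_robots by the web server hosting the "api" documentation site. A minimal sketch of what such a setup could look like, assuming an Apache-style virtual host (the host name, paths other than /var/www/favicon_robots, and the directives themselves are assumptions and not part of this commit):

    # Hypothetical sketch only: serve the shared favicon and robots.txt
    # from /var/www/favicon_robots instead of the Sphinx build output.
    # The actual server configuration lives outside this repository.
    <VirtualHost *:443>
        ServerName api.taler.net
        DocumentRoot /var/www/api/html

        # Map the two well-known root paths to the shared directory.
        Alias /favicon.ico /var/www/favicon_robots/favicon.ico
        Alias /robots.txt  /var/www/favicon_robots/robots.txt

        <Directory /var/www/favicon_robots>
            Require all granted
        </Directory>
    </VirtualHost>

Keeping these two files in one shared directory lets several documentation sites reference the same favicon and crawler policy without rebuilding the Sphinx output when either changes.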