From cdda5b6ece0e7b41b2d1a834ee7f8a0e5889ea53 Mon Sep 17 00:00:00 2001
From: Yaroslav Halchenko
Date: Mon, 31 Aug 2009 17:09:33 -0400
Subject: [PATCH] Adding robots.txt to be installed to avoid traversing of debian/

---
 Makefile                  | 1 +
 sphinx/_static/robots.txt | 4 ++++
 2 files changed, 5 insertions(+)
 create mode 100644 sphinx/_static/robots.txt

diff --git a/Makefile b/Makefile
index fb0bc49..3e2ceaf 100644
--- a/Makefile
+++ b/Makefile
@@ -12,6 +12,7 @@ pics:
 
 html: pics source
 	rsync -rvlhp sphinx/ build/src
 	cd build/src && $(MAKE) html BUILDDIR=$(CURDIR)/build 2>&1
+	mv $(WWW_DIR)/_static/robots.txt $(WWW_DIR)/
 
 clean:
diff --git a/sphinx/_static/robots.txt b/sphinx/_static/robots.txt
new file mode 100644
index 0000000..dc181cc
--- /dev/null
+++ b/sphinx/_static/robots.txt
@@ -0,0 +1,4 @@
+# robots.txt for http://neuro.debian.net
+
+User-agent: *
+Disallow: /debian
-- 
2.39.2
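
Note (not part of the patch): crawlers only read the exclusion rules served from the site root as /robots.txt, which is why the file generated into _static/ is moved up to $(WWW_DIR)/ after the Sphinx build. A quick manual check of the deployed file, sketched here as an illustration rather than something the patch adds, could look like:

	# fetch robots.txt from the site root and confirm the debian/ tree
	# is excluded from crawling (URL taken from the robots.txt header above)
	curl -s http://neuro.debian.net/robots.txt | grep -i 'Disallow: /debian'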