Add a static robots.txt file that simply disallows all crawlers.
Signed-off-by: Julien Goodwin <jgoodwin@studio442.com.au>
---
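The new file asks every compliant crawler to skip the whole site. Note that
robots.txt is advisory, not access control; crawlers honor it voluntarily.
A quick sanity check of the semantics with Python's stdlib parser (the two
lines mirror ui/robots.txt; nothing in this snippet is part of the patch):

    from urllib.robotparser import RobotFileParser

    rp = RobotFileParser()
    # Same two lines as ui/robots.txt.
    rp.parse(["User-agent: *", "Disallow: /"])
    print(rp.can_fetch("Googlebot", "/any/path"))  # False
    print(rp.can_fetch("*", "/"))                  # False

Every path comes back disallowed for every user agent, which is the intent.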
src/kimchi/config.py.in | 4 ++++
ui/Makefile.am          | 4 ++++
ui/robots.txt           | 2 ++
3 files changed, 10 insertions(+)
create mode 100644 ui/robots.txt
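For context: the new '/robots.txt' entry relies on CherryPy's staticfile
tool, which serves a single file at a fixed route and requires an absolute
filename; that is why the config builds the path from paths.ui_dir. A
minimal standalone sketch of the same mapping (hypothetical install path
and placeholder handler, not part of this patch):

    import cherrypy

    class Root(object):
        @cherrypy.expose
        def index(self):
            return "placeholder"

    config = {
        '/robots.txt': {
            'tools.staticfile.on': True,
            # staticfile needs an absolute path; this one is hypothetical
            'tools.staticfile.filename': '/usr/share/kimchi/ui/robots.txt',
        }
    }

    cherrypy.quickstart(Root(), '/', config)

With this running, GET /robots.txt returns the file contents directly,
bypassing the page handler.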
diff --git a/src/kimchi/config.py.in b/src/kimchi/config.py.in
index 83a5dd0..f36cc32 100644
--- a/src/kimchi/config.py.in
+++ b/src/kimchi/config.py.in
@@ -245,6 +245,10 @@ class KimchiConfig(dict):
             'tools.staticfile.on': True,
             'tools.staticfile.filename': '%s/images/logo.ico' % paths.ui_dir
         },
+        '/robots.txt': {
+            'tools.staticfile.on': True,
+            'tools.staticfile.filename': '%s/robots.txt' % paths.ui_dir
+        },
         '/help': {
             'tools.staticdir.on': True,
             'tools.staticdir.dir': '%s/ui/pages/help' % paths.prefix,
diff --git a/ui/Makefile.am b/ui/Makefile.am
index 5192162..d541355 100644
--- a/ui/Makefile.am
+++ b/ui/Makefile.am
@@ -16,3 +16,7 @@
 # limitations under the License.
 
 SUBDIRS = css images js libs pages spice-html5
+
+uidir = $(datadir)/kimchi/ui
+
+dist_ui_DATA = robots.txt
diff --git a/ui/robots.txt b/ui/robots.txt
new file mode 100644
index 0000000..1f53798
--- /dev/null
+++ b/ui/robots.txt
@@ -0,0 +1,2 @@
+User-agent: *
+Disallow: /
--
2.1.4