From 5f5c8141c976e9d33c9b9fd22d59a461e740e9c8 Mon Sep 17 00:00:00 2001
From: Matt D
Date: Fri, 28 Apr 2017 11:04:22 +1200
Subject: [PATCH] Add robots.txt handler (#604)

Disallow all robots from accessing the phishing server, to prevent
phishing materials from being indexed during campaigns.
---
 controllers/route.go | 6 ++++++
 1 file changed, 6 insertions(+)

diff --git a/controllers/route.go b/controllers/route.go
index ce6ec676..a5682318 100644
--- a/controllers/route.go
+++ b/controllers/route.go
@@ -84,6 +84,7 @@ func CreatePhishingRouter() http.Handler {
 	router := mux.NewRouter()
 	router.PathPrefix("/static/").Handler(http.StripPrefix("/static/", http.FileServer(http.Dir("./static/endpoint/"))))
 	router.HandleFunc("/track", PhishTracker)
+	router.HandleFunc("/robots.txt", RobotsHandler)
 	router.HandleFunc("/{path:.*}/track", PhishTracker)
 	router.HandleFunc("/{path:.*}", PhishHandler)
 	return router
 }
@@ -271,6 +272,11 @@ func PhishHandler(w http.ResponseWriter, r *http.Request) {
 	w.Write(htmlBuff.Bytes())
 }
 
+// Prevents search engines, etc. from indexing phishing materials
+func RobotsHandler(w http.ResponseWriter, r *http.Request) {
+	w.Write(([]byte)("User-agent: *\nDisallow: /\n"))
+}
+
 // Use allows us to stack middleware to process the request
 // Example taken from https://github.com/gorilla/mux/pull/36#issuecomment-25849172
 func Use(handler http.HandlerFunc, mid ...func(http.Handler) http.HandlerFunc) http.HandlerFunc {