Add robots.txt handler (#604)

Disallow all robots from accessing the phishing server, to prevent phishing materials from being indexed during campaigns.
pull/603/head
Matt D 2017-04-28 11:04:22 +12:00 committed by Jordan Wright
parent adf9fa7208
commit 5f5c8141c9
1 changed file with 6 additions and 0 deletions

View File

@ -84,6 +84,7 @@ func CreatePhishingRouter() http.Handler {
router := mux.NewRouter() router := mux.NewRouter()
router.PathPrefix("/static/").Handler(http.StripPrefix("/static/", http.FileServer(http.Dir("./static/endpoint/")))) router.PathPrefix("/static/").Handler(http.StripPrefix("/static/", http.FileServer(http.Dir("./static/endpoint/"))))
router.HandleFunc("/track", PhishTracker) router.HandleFunc("/track", PhishTracker)
router.HandleFunc("/robots.txt", RobotsHandler)
router.HandleFunc("/{path:.*}/track", PhishTracker) router.HandleFunc("/{path:.*}/track", PhishTracker)
router.HandleFunc("/{path:.*}", PhishHandler) router.HandleFunc("/{path:.*}", PhishHandler)
return router return router
@ -271,6 +272,11 @@ func PhishHandler(w http.ResponseWriter, r *http.Request) {
w.Write(htmlBuff.Bytes()) w.Write(htmlBuff.Bytes())
} }
// Prevents search engines, etc. from indexing phishing materials
func RobotsHandler(w http.ResponseWriter, r *http.Request) {
w.Write(([]byte)("User-agent: *\nDisallow: /\n"))
}
// Use allows us to stack middleware to process the request // Use allows us to stack middleware to process the request
// Example taken from https://github.com/gorilla/mux/pull/36#issuecomment-25849172 // Example taken from https://github.com/gorilla/mux/pull/36#issuecomment-25849172
func Use(handler http.HandlerFunc, mid ...func(http.Handler) http.HandlerFunc) http.HandlerFunc { func Use(handler http.HandlerFunc, mid ...func(http.Handler) http.HandlerFunc) http.HandlerFunc {