frontend: add robots.txt to disallow crawling
Signed-off-by: Ahmet Alp Balkan <ahmetb@google.com>
parent 4683ee449c
commit 04c3876e94
1 changed file with 1 addition and 0 deletions
@@ -119,6 +119,7 @@ func main() {
 	r.HandleFunc("/logout", svc.logoutHandler).Methods(http.MethodGet)
 	r.HandleFunc("/cart/checkout", svc.placeOrderHandler).Methods(http.MethodPost)
 	r.PathPrefix("/static/").Handler(http.StripPrefix("/static/", http.FileServer(http.Dir("./static/"))))
+	r.HandleFunc("/robots.txt", func(w http.ResponseWriter, r *http.Request) { fmt.Fprint(w, "User-agent: *\nDisallow: /") })
 
 	var handler http.Handler = r
 	handler = &logHandler{log: log, next: handler} // add logging
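For context, a minimal standalone sketch of what the added route does: it serves a disallow-all policy to every crawler. The handler body is taken verbatim from the diff; the httptest scaffolding around it is illustrative and not part of the frontend service.

package main

import (
	"fmt"
	"io"
	"net/http"
	"net/http/httptest"
)

func main() {
	// Same handler body as the one registered for /robots.txt in the diff:
	// it tells all user agents not to index any path on the demo frontend.
	robots := func(w http.ResponseWriter, r *http.Request) {
		fmt.Fprint(w, "User-agent: *\nDisallow: /")
	}

	// Exercise the handler in isolation and inspect the response body.
	req := httptest.NewRequest(http.MethodGet, "/robots.txt", nil)
	rec := httptest.NewRecorder()
	robots(rec, req)

	body, _ := io.ReadAll(rec.Result().Body)
	fmt.Printf("%q\n", body) // "User-agent: *\nDisallow: /"
}

Once the frontend is running, a GET request to /robots.txt should return the same two-line body, signalling to well-behaved crawlers that the demo shop should not be indexed.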