robotstxt.go

package main

import (
	"net/http"

	"github.com/gorilla/context"

	"v2.staffjoy.com/environments"
	"v2.staffjoy.com/faraday/services"
)
const (
	robotstxtPath  = "/robots.txt"
	robotstxtAllow = "User-agent: *\nDisallow:"   // Disallow nothing
	robotstxtDeny  = "User-agent: *\nDisallow: /" // Disallow everything
)

// RobotstxtMiddleware is a negroni middleware that determines whether search
// engines should access a service
type RobotstxtMiddleware struct {
	Config environments.Config
}

// NewRobotstxtMiddleware returns a new middleware for controlling search engines
func NewRobotstxtMiddleware(config environments.Config) *RobotstxtMiddleware {
	return &RobotstxtMiddleware{
		Config: config,
	}
}
func (svc *RobotstxtMiddleware) ServeHTTP(res http.ResponseWriter, req *http.Request, next http.HandlerFunc) {
	if req.URL.Path == robotstxtPath {
		// Tell search engine what to do!
		// Headers must be set before WriteHeader, otherwise they are ignored.
		res.Header().Set("Content-Type", "text/plain")
		res.WriteHeader(http.StatusOK)

		// Allow crawling only for public services in production;
		// default to disallow everywhere else.
		service := context.Get(req, requestedService).(services.Service)
		var body string
		if (svc.Config.Name == "production") && (service.Security == services.Public) {
			body = robotstxtAllow
		} else {
			body = robotstxtDeny
		}
		res.Write([]byte(body))
		return
	}
	next(res, req)
}
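
For context, here is a minimal sketch of how this middleware might be wired into a negroni chain. This wiring is an illustration, not code from the repository: the negroni import path, the config literal, the exampleWiring function name, and the final handler are assumptions, and in the real service an earlier middleware must have stored the requestedService value in the gorilla/context request context before this middleware runs.

example_wiring.go (hypothetical)

package main

import (
	"net/http"

	"github.com/urfave/negroni"

	"v2.staffjoy.com/environments"
)

// exampleWiring attaches the robots.txt middleware to a negroni chain.
// The Config literal only sets Name, the one field this middleware reads;
// the real service builds its config from the environment.
func exampleWiring() {
	config := environments.Config{Name: "production"} // assumed construction; other fields omitted

	n := negroni.New()
	n.Use(NewRobotstxtMiddleware(config))
	// Placeholder final handler; in faraday this would be the reverse proxy,
	// preceded by the middleware that sets the requestedService context key.
	n.UseHandler(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		w.Write([]byte("hello"))
	}))

	http.ListenAndServe(":8000", n)
}

Because RobotstxtMiddleware's ServeHTTP already has the negroni handler signature (res, req, next), it can be passed to Use directly; requests for any path other than /robots.txt fall through to the rest of the chain via next.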