Added robots.txt to Stupid to prevent crawling
CalebQ42 committed Aug 7, 2023
1 parent de4477b commit 96d1011
Showing 2 changed files with 10 additions and 0 deletions.
2 changes: 2 additions & 0 deletions embed/robots.txt
@@ -0,0 +1,2 @@
+User-agent: *
+Disallow: /
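
These two directives tell every crawler (User-agent: *) that no path may be fetched (Disallow: /), taking the whole backend off limits to well-behaved bots.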
8 changes: 8 additions & 0 deletions stupid.go
@@ -2,6 +2,7 @@ package stupid

 import (
 	"crypto/ed25519"
+	_ "embed"
 	"log"
 	"net/http"
 	"path"
@@ -12,6 +13,9 @@ import (
 	"github.com/CalebQ42/stupid-backend/pkg/db"
 )

+//go:embed embed/robots.txt
+var robotsTxt []byte
+
 // An instance of the stupid backend. Implements http.Handler
 type Stupid struct {
 	keys db.Table
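
For context, //go:embed (Go 1.16+) bakes the named file into the compiled binary at build time, so the server needs no filesystem access at runtime. That is also why the first hunk adds the blank _ "embed" import: embedding into a plain []byte never references the embed package by name. A minimal standalone sketch of the mechanism (not repository code):

package main

import (
	_ "embed" // blank import: required when //go:embed targets a plain []byte or string
	"fmt"
)

// The path is relative to this source file's directory; the file must
// exist at build time, and its bytes are baked into the binary.
//go:embed embed/robots.txt
var robotsTxt []byte

func main() {
	fmt.Printf("%s", robotsTxt) // prints the embedded file verbatim
}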
@@ -86,6 +90,10 @@ func (s *Stupid) ServeHTTP(w http.ResponseWriter, r *http.Request) {
 		Method: r.Method,
 		Resp:   w,
 	}
+	if len(req.Path) == 1 && req.Path[0] == "robots.txt" {
+		w.Write(robotsTxt)
+		return
+	}
 	if s.headerValues != nil {
 		for k, v := range s.headerValues {
 			w.Header().Set(k, v)
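
A quick httptest round trip confirms the new behavior. Spinning up a real Stupid instance needs database tables, so this sketch (illustrative, not repository code) exercises a stripped-down handler with the same robots.txt branch:

package main

import (
	"fmt"
	"io"
	"net/http"
	"net/http/httptest"
	"strings"
)

// In stupid.go this comes from //go:embed; a literal stands in here.
var robotsTxt = []byte("User-agent: *\nDisallow: /\n")

// robotsHandler mirrors the branch added to Stupid.ServeHTTP: a request
// whose only path element is "robots.txt" gets the embedded file back.
func robotsHandler(w http.ResponseWriter, r *http.Request) {
	if strings.Trim(r.URL.Path, "/") == "robots.txt" {
		w.Write(robotsTxt)
		return
	}
	http.NotFound(w, r)
}

func main() {
	rec := httptest.NewRecorder()
	robotsHandler(rec, httptest.NewRequest(http.MethodGet, "/robots.txt", nil))
	body, _ := io.ReadAll(rec.Result().Body)
	fmt.Print(string(body)) // prints the disallow-all robots.txt
}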
