/
proxy.go
182 lines (158 loc) · 5.23 KB
/
proxy.go
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
package main
import (
"bytes"
"compress/gzip"
"fmt"
"io"
"net"
"net/http"
"net/http/httputil"
"strings"
)
// rewrite maps an incoming request on our configured clearnet domain onto
// the corresponding .onion upstream: the domain suffix is swapped for
// ".onion", non-default ports are preserved, and the scheme is derived from
// the URL, the X-Forwarded-Proto header, the port, or the TLS state.
func (app *application) rewrite(r *httputil.ProxyRequest) {
	suffix := app.domain
	if !strings.HasPrefix(suffix, ".") {
		suffix = "." + suffix
	}

	hostname, port, splitErr := net.SplitHostPort(r.In.Host)
	if splitErr != nil {
		// Host carried no port component; fall back to the URL's port.
		hostname = r.In.Host
		port = r.In.URL.Port()
	}

	// Strip our domain (and any trailing dot) and substitute the onion TLD.
	hostname = strings.TrimSuffix(strings.TrimSuffix(hostname, suffix), ".") + ".onion"

	// Default HTTP/HTTPS ports stay implicit; anything else is kept explicit.
	switch port {
	case "", "80", "443":
	default:
		hostname = net.JoinHostPort(hostname, port)
	}

	scheme := r.In.URL.Scheme
	if scheme == "" {
		if forwarded := r.In.Header.Get("X-Forwarded-Proto"); forwarded != "" {
			scheme = forwarded
		} else if port == "443" {
			scheme = "https"
		} else {
			// "", "80", and every other port default to plain http.
			scheme = "http"
		}
	}
	// A live TLS connection always wins over the heuristics above.
	if r.In.TLS != nil {
		scheme = "https"
	}

	r.Out.Host = hostname
	r.Out.URL.Scheme = scheme
	r.Out.URL.Host = hostname
	app.logger.Debugf("modified request: %+v", r.Out)
}
// proxyErrorHandler is wired into httputil.ReverseProxy.ErrorHandler; it
// reports upstream/transport failures (and errors returned by
// modifyResponse) to the client as 502 Bad Gateway.
func (app *application) proxyErrorHandler(w http.ResponseWriter, r *http.Request, err error) {
	app.logError(w, err, http.StatusBadGateway)
}
// modifyResponse rewrites a proxied response before it is returned to the
// client: ".onion" is replaced with the configured clearnet domain in header
// names, header values, and textual bodies; security headers that would pin
// clients to the origin are stripped; gzipped bodies are transparently
// unpacked and repacked; and responses containing blacklisted words are
// rejected. A non-nil return is routed to proxyErrorHandler by
// httputil.ReverseProxy.
func (app *application) modifyResponse(resp *http.Response) error {
	app.logger.Debugf("entered modifyResponse for %s with status %d", sanitizeString(resp.Request.URL.String()), resp.StatusCode)
	domain := app.domain
	if !strings.HasPrefix(domain, ".") {
		domain = fmt.Sprintf(".%s", domain)
	}
	app.logger.Debugf("Header: %#v", resp.Header)
	// Rewrite ".onion" in header names and values. Collect the keys first so
	// the map is never mutated while being ranged over, and delete any key
	// that was renamed so the stale entry does not linger beside the new one.
	keys := make([]string, 0, len(resp.Header))
	for k := range resp.Header {
		keys = append(keys, k)
	}
	for _, k := range keys {
		newKey := strings.ReplaceAll(k, ".onion", domain)
		vals := resp.Header[k]
		rewritten := make([]string, 0, len(vals))
		for _, v := range vals {
			rewritten = append(rewritten, strings.ReplaceAll(v, ".onion", domain))
		}
		if newKey != k {
			delete(resp.Header, k)
		}
		resp.Header[newKey] = rewritten
	}
	// remove headers like HSTS that would break serving the site on our domain
	headersToRemove := []string{"Strict-Transport-Security", "Public-Key-Pins", "Public-Key-Pins-Report-Only"}
	for _, h := range headersToRemove {
		resp.Header.Del(h)
	}
	// no body modification on file downloads
	// https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Content-Disposition
	if cd := resp.Header.Get("Content-Disposition"); strings.HasPrefix(cd, "attachment") {
		app.logger.Debugf("%s - detected file download, not attempting to modify body", sanitizeString(resp.Request.URL.String()))
		return nil
	}
	// https://developer.mozilla.org/en-US/docs/Web/HTTP/Basics_of_HTTP/MIME_types/Common_types
	contentTypesForReplace := []string{
		"text/plain",
		"text/html",
		"text/css",
		"text/javascript",
		"text/xml",
		"application/javascript",
		"application/json",
		"application/ld+json",
		"application/xml",
		"application/rss+xml",
		"application/atom+xml",
		"application/rdf+xml",
	}
	// Header.Get also guards the empty-value-slice case that would otherwise
	// panic when indexing the first element below.
	contentType := resp.Header.Get("Content-Type")
	if contentType == "" {
		app.logger.Debugf("%s - no content type skipping replace", sanitizeString(resp.Request.URL.String()))
		return nil
	}
	// https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Content-Type
	cleanedUpContentType := strings.Split(contentType, ";")[0]
	if !sliceContains(contentTypesForReplace, cleanedUpContentType) {
		app.logger.Debugf("%s - content type is %s, not replacing", sanitizeString(resp.Request.URL.String()), cleanedUpContentType)
		return nil
	}
	app.logger.Debugf("%s - found content type %s, replacing strings", sanitizeString(resp.Request.URL.String()), contentType)
	reader := resp.Body
	usedGzip := false
	// https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Content-Encoding
	if strings.EqualFold(resp.Header.Get("Content-Encoding"), "gzip") {
		app.logger.Debugf("%s - detected gzipped body", sanitizeString(resp.Request.URL.String()))
		gz, err := gzip.NewReader(resp.Body)
		if err != nil {
			return fmt.Errorf("could not create gzip reader: %w", err)
		}
		reader = gz
		usedGzip = true
	}
	// for all other content replace .onion urls with our custom domain
	body, err := io.ReadAll(reader)
	if usedGzip {
		// Closing the gzip reader releases its state; it does not close the
		// underlying response body.
		reader.Close()
	}
	// Close the original body so the transport can reuse the connection; it
	// is replaced with an in-memory reader below.
	resp.Body.Close()
	if err != nil {
		return fmt.Errorf("error on reading body: %w", err)
	}
	app.logger.Debugf("%s: Got a %d body len", sanitizeString(resp.Request.URL.String()), len(body))
	// replace stuff for domain replacement
	body = bytes.ReplaceAll(body, []byte(".onion/"), []byte(fmt.Sprintf("%s/", domain)))
	body = bytes.ReplaceAll(body, []byte(`.onion"`), []byte(fmt.Sprintf(`%s"`, domain)))
	body = bytes.ReplaceAll(body, []byte(".onion<"), []byte(fmt.Sprintf("%s<", domain)))
	for word, re := range app.blacklistedwords {
		if re.Match(body) {
			return fmt.Errorf("access to the site is forbidden because it contains the blacklisted word %q", word)
		}
	}
	// if we unpacked before, respect the client and repack the modified body (the header is still set)
	if usedGzip {
		app.logger.Debugf("%s - re gzipping body", sanitizeString(resp.Request.URL.String()))
		gzipped, err := gzipInput(body)
		if err != nil {
			return fmt.Errorf("could not gzip body: %w", err)
		}
		body = gzipped
	}
	// body can be read only once so recreate a new reader
	resp.Body = io.NopCloser(bytes.NewBuffer(body))
	// keep both the struct field and the header in sync with the new length
	resp.ContentLength = int64(len(body))
	resp.Header["Content-Length"] = []string{fmt.Sprint(len(body))}
	return nil
}