Make prefixer support multiple tags, chunked encoding

The prefixer needs to do more than replace `src="` attributes as it
currently does, because that is not the only place a relative URL can
appear. It also needs to prefix URLs found in CSS, which can likewise
come from the downstream http.ResponseWriter.

This adds support for an arbitrary list of patterns that will cause the
prefixer to insert its configured prefix. The list is currently set to
look for the `src="`, `href="`, and `url(` attributes.

Also, because we are modifying the stream, we need to suppress the
Content-Length generated by any downstream http.Handler and instead
enable Transfer-Encoding: chunked so that we can stream the modified
response (we don't know a priori how many times we'll perform a
prefixing, so we can't calculate a final Content-Length). This is
accomplished by duplicating the headers in the wrapResponseWriter that
is handed to the `Next` handler. We also handle the chunking and
flushing that needs to happen as a result of using chunked transfer
encoding.
Tim Raymond 2017-01-26 16:44:51 -05:00
parent 4064817ac8
commit 14437af28d
4 changed files with 144 additions and 20 deletions
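To make the Content-Length point in the commit message concrete: the rewritten body grows by len(Prefix) bytes for every attribute that matches, and the number of matches is only known once the entire upstream body has been scanned. A minimal sketch of that arithmetic (the `finalLength` helper is hypothetical; nothing in this change buffers the body like this, it streams instead):

```go
package main

import (
	"bytes"
	"fmt"
)

// finalLength is illustrative only: the output length cannot be known up
// front without buffering and scanning the whole body, which is exactly what
// the streaming prefixer avoids by switching to chunked transfer encoding.
func finalLength(body []byte, prefix string, attrs [][]byte) int {
	n := len(body)
	for _, attr := range attrs {
		// every occurrence of an attr causes len(prefix) extra bytes of output
		n += bytes.Count(body, attr) * len(prefix)
	}
	return n
}

func main() {
	body := []byte(`<script src="/app.js"></script><link href="/app.css">`)
	attrs := [][]byte{[]byte(`src="`), []byte(`href="`)}
	// one match per attr here, so the result is len(body) + 2*len(prefix)
	fmt.Println(finalLength(body, "/arbitraryprefix", attrs))
}
```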

@@ -56,6 +56,11 @@ func Assets(opts AssetsOpts) http.Handler {
up := URLPrefixer{
Prefix: "/chronograf",
Next: assets.Handler(),
Attrs: [][]byte{
[]byte(`src="`),
[]byte(`href="`),
[]byte(`url(`),
},
}
up.ServeHTTP(w, r)
})
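For context, the same prefixer can sit in front of any handler that emits relative URLs, not only the bundled assets handler above. A hypothetical standalone wiring (import path, directory, and address are assumptions, not part of this change):

```go
package main

import (
	"net/http"

	"github.com/influxdata/chronograf/server"
)

func main() {
	// Serve files from a local build directory under the /chronograf prefix and
	// rewrite the relative URLs they contain so they resolve back to this mux.
	assets := http.StripPrefix("/chronograf/", http.FileServer(http.Dir("./ui/build")))

	http.Handle("/chronograf/", &server.URLPrefixer{
		Prefix: "/chronograf",
		Next:   assets,
		Attrs: [][]byte{
			[]byte(`src="`),
			[]byte(`href="`),
			[]byte(`url(`),
		},
	})
	http.ListenAndServe(":8888", nil)
}
```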

@@ -4,62 +4,136 @@ import (
"bufio"
"bytes"
"io"
"log"
"net/http"
)
// URLPrefixer is a wrapper for an http.Handler that will prefix all occurrences of a relative URL with the configured Prefix
type URLPrefixer struct {
Prefix string // the prefix to be appended after any detected Attrs
Next http.Handler // the http.Handler which will generate the content to be modified by this handler
Attrs [][]byte // a list of attrs that should have their URLs prefixed. For example `src="` or `href="` would be valid
}
type wrapResponseWriter struct {
http.ResponseWriter
Substitute *io.PipeWriter
headerWritten bool
dupHeader http.Header
}
func (wrw wrapResponseWriter) Write(p []byte) (int, error) {
return wrw.Substitute.Write(p)
}
func (wrw wrapResponseWriter) WriteHeader(code int) {
if !wrw.headerWritten {
wrw.ResponseWriter.Header().Set("Content-Type", wrw.Header().Get("Content-Type"))
wrw.headerWritten = true
}
wrw.ResponseWriter.WriteHeader(code)
}
// Header() copies the Header map from the underlying ResponseWriter to prevent
// modifications to it by callers
func (wrw wrapResponseWriter) Header() http.Header {
wrw.dupHeader = http.Header{}
origHeader := wrw.ResponseWriter.Header()
for k, v := range origHeader {
wrw.dupHeader[k] = v
}
return wrw.dupHeader
}
const CHUNK_SIZE int = 512
// ServeHTTP implements an http.Handler that prefixes relative URLs from the Next handler with the configured prefix
func (up *URLPrefixer) ServeHTTP(rw http.ResponseWriter, r *http.Request) {
// chunked transfer because we're modifying the response on the fly, so we
// won't know the final content-length
rw.Header().Set("Connection", "Keep-Alive")
rw.Header().Set("Transfer-Encoding", "chunked")
//rw.Header().Set("X-Content-Type-Options", "nosniff")
writtenCount := 0 // number of bytes written to rw
flusher, ok := rw.(http.Flusher)
if !ok {
log.Fatalln("Exected http.ResponseWriter to be an http.Flusher, but wasn't")
}
nextRead, nextWrite := io.Pipe()
go func() {
defer nextWrite.Close()
up.Next.ServeHTTP(wrapResponseWriter{ResponseWriter: rw, Substitute: nextWrite}, r)
}()
// setup a buffer which is the max length of our target attrs
b := make([]byte, up.maxlen(up.Attrs...))
io.ReadFull(nextRead, b) // prime the buffer with the start of the input
buf := bytes.NewBuffer(b)
// Read next handler's response byte by byte
src := bufio.NewScanner(nextRead)
src.Split(bufio.ScanBytes)
for {
window := buf.Bytes()
// advance a byte if window is not a src attr
if matchlen, match := up.match(window, up.Attrs...); matchlen == 0 {
if src.Scan() {
// shift the next byte into buf
rw.Write(buf.Next(1))
writtenCount++
buf.Write(src.Bytes())
if writtenCount >= CHUNK_SIZE {
flusher.Flush()
writtenCount = 0
}
} else {
if err := src.Err(); err != nil {
log.Println("Error encountered while scanning: err:", err)
}
rw.Write(window)
flusher.Flush()
break
}
continue
} else {
buf.Next(matchlen) // advance to the relative URL
for i := 0; i < matchlen; i++ {
src.Scan()
buf.Write(src.Bytes())
}
rw.Write(match) // add the matched attr to the output
io.WriteString(rw, up.Prefix) // write the prefix
}
}
}
// match compares the subject against a list of targets. If the subject
// matches any of them, a non-zero value is returned: the length of the match.
// It is assumed that subject's length is >= the length of every target. The
// matching []byte is also returned as the second return value
func (up *URLPrefixer) match(subject []byte, targets ...[]byte) (int, []byte) {
for _, target := range targets {
if bytes.Equal(subject[:len(target)], target) {
return len(target), target
}
}
return 0, []byte{}
}
// maxlen returns the length of the largest []byte provided to it as an argument
func (up *URLPrefixer) maxlen(targets ...[]byte) int {
max := 0
for _, tgt := range targets {
if tlen := len(tgt); tlen > max {
max = tlen
}
}
return max
}
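To make the two helpers concrete, here is a small in-package sketch (the `matchSketch` function is hypothetical and not part of this change) of how maxlen sizes the scanning window and match reports which attr the window starts with:

```go
package server

import "fmt"

// matchSketch is illustrative only: maxlen sizes the scanning window to the
// longest configured attr, and match reports how many bytes of the window are
// one of those attrs (zero meaning "no attr starts here").
func matchSketch() {
	up := &URLPrefixer{
		Prefix: "/arbitraryprefix",
		Attrs:  [][]byte{[]byte(`src="`), []byte(`href="`), []byte(`url(`)},
	}

	// The window is as long as the longest attr, here len(`href="`) == 6.
	window := make([]byte, up.maxlen(up.Attrs...))
	copy(window, `src="/`) // a window whose head is one of the shorter attrs

	if matchlen, attr := up.match(window, up.Attrs...); matchlen > 0 {
		fmt.Printf("matched %q (%d bytes); the prefix is written next\n", attr, matchlen)
	}
}
```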

@@ -15,18 +15,63 @@ var prefixerTests = []struct {
subject string
expected string
shouldErr bool
attrs [][]byte
}{
{
`One script tag`,
`<script type="text/javascript" src="/loljavascript.min.js">`,
`<script type="text/javascript" src="/arbitraryprefix/loljavascript.min.js">`,
false,
[][]byte{
[]byte(`src="`),
},
},
{
`Two script tags`,
`<script type="text/javascript" src="/loljavascript.min.js"><script type="text/javascript" src="/anotherscript.min.js">`,
`<script type="text/javascript" src="/arbitraryprefix/loljavascript.min.js"><script type="text/javascript" src="/arbitraryprefix/anotherscript.min.js">`,
false,
[][]byte{
[]byte(`src="`),
},
},
{
`Link href`,
`<link rel="shortcut icon" href="/favicon.ico">`,
`<link rel="shortcut icon" href="/arbitraryprefix/favicon.ico">`,
false,
[][]byte{
[]byte(`src="`),
[]byte(`href="`),
},
},
{
`Trailing HTML`,
`<!DOCTYPE html>
<html>
<head>
<meta http-equiv="Content-type" content="text/html; charset=utf-8"/>
<title>Chronograf</title>
<link rel="shortcut icon" href="/favicon.ico"><link href="/chronograf.css" rel="stylesheet"></head>
<body>
<div id='react-root'></div>
<script type="text/javascript" src="/manifest.7489452b099f9581ca1b.dev.js"></script><script type="text/javascript" src="/vendor.568c0101d870a13ecff9.dev.js"></script><script type="text/javascript" src="/app.13d0ce0b33609be3802b.dev.js"></script></body>
</html>`,
`<!DOCTYPE html>
<html>
<head>
<meta http-equiv="Content-type" content="text/html; charset=utf-8"/>
<title>Chronograf</title>
<link rel="shortcut icon" href="/arbitraryprefix/favicon.ico"><link href="/arbitraryprefix/chronograf.css" rel="stylesheet"></head>
<body>
<div id='react-root'></div>
<script type="text/javascript" src="/arbitraryprefix/manifest.7489452b099f9581ca1b.dev.js"></script><script type="text/javascript" src="/arbitraryprefix/vendor.568c0101d870a13ecff9.dev.js"></script><script type="text/javascript" src="/arbitraryprefix/app.13d0ce0b33609be3802b.dev.js"></script></body>
</html>`,
false,
[][]byte{
[]byte(`src="`),
[]byte(`href="`),
},
},
}
@@ -41,7 +86,7 @@ func Test_Server_Prefixer_RewritesURLs(t *testing.T) {
fmt.Fprintln(w, subject)
})
pfx := &server.URLPrefixer{Prefix: "/arbitraryprefix", Next: backend, Attrs: test.attrs}
ts := httptest.NewServer(pfx)
defer ts.Close()
@@ -57,7 +102,7 @@ func Test_Server_Prefixer_RewritesURLs(t *testing.T) {
}
if string(actual) != expected+"\n" {
t.Error(test.name, ":\n Unsuccessful prefixing.\n\tWant:", fmt.Sprintf("%+q", expected), "\n\tGot: ", fmt.Sprintf("%+q", string(actual)))
}
}
}
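A further test one could add (hypothetical, not part of this change; the import path for the server package is assumed) to cover the Content-Length suppression described in the commit message:

```go
package server_test

import (
	"fmt"
	"net/http"
	"net/http/httptest"
	"testing"

	"github.com/influxdata/chronograf/server"
)

// Hypothetical additional test: the prefixer must drop any Content-Length set
// by the Next handler and stream the response with chunked transfer encoding,
// so the client sees an unknown (-1) content length.
func Test_Server_Prefixer_SuppressesContentLength(t *testing.T) {
	backend := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("Content-Length", "20") // absorbed by the duplicated header map
		fmt.Fprint(w, `<script src="/loljavascript.min.js">`)
	})

	pfx := &server.URLPrefixer{
		Prefix: "/arbitraryprefix",
		Next:   backend,
		Attrs:  [][]byte{[]byte(`src="`)},
	}
	ts := httptest.NewServer(pfx)
	defer ts.Close()

	res, err := http.Get(ts.URL)
	if err != nil {
		t.Fatal("Unexpected error fetching from prefixer:", err)
	}
	defer res.Body.Close()

	if res.ContentLength != -1 {
		t.Error("Expected unknown Content-Length (-1) for a chunked response, got:", res.ContentLength)
	}
}
```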

@@ -7,4 +7,4 @@
<body>
<div id='react-root'></div>
</body>
</html>