2017-11-08 00:10:54 +01:00
|
|
|
package main
|
|
|
|
|
|
|
|
import (
|
|
|
|
"bytes"
|
2017-11-15 15:37:54 +01:00
|
|
|
"compress/gzip"
|
2018-07-21 19:50:53 +02:00
|
|
|
"encoding/json"
|
2017-11-18 13:23:36 +01:00
|
|
|
"fmt"
|
2018-11-25 18:10:10 +01:00
|
|
|
"html/template"
|
2017-11-08 00:10:54 +01:00
|
|
|
"io"
|
|
|
|
"io/ioutil"
|
|
|
|
"net/http"
|
2018-11-22 22:45:20 +01:00
|
|
|
"net/http/httptest"
|
2017-11-08 00:10:54 +01:00
|
|
|
"net/http/httputil"
|
2018-08-04 01:53:54 +02:00
|
|
|
"net/url"
|
2018-11-24 11:03:16 +01:00
|
|
|
"plugin"
|
2018-07-21 19:50:53 +02:00
|
|
|
"strings"
|
2017-11-08 00:10:54 +01:00
|
|
|
|
2018-11-24 18:51:33 +01:00
|
|
|
"github.com/ofabricio/curl"
|
2017-11-21 22:36:40 +01:00
|
|
|
)
|
2017-11-15 15:37:54 +01:00
|
|
|
|
2018-11-29 20:52:14 +01:00
|
|
|
// StatusInternalProxyError is the status code recorded for any unknown
// proxy error (set by the reverse proxy's ErrorHandler). 999 is outside
// the valid HTTP range so it cannot collide with a real upstream status.
const StatusInternalProxyError = 999
|
|
|
|
|
2017-11-08 00:10:54 +01:00
|
|
|
func main() {
|
2018-09-16 16:21:36 +02:00
|
|
|
config := ReadConfig()
|
2018-11-11 17:54:35 +01:00
|
|
|
startCapture(config)
|
|
|
|
}
|
2017-11-08 00:10:54 +01:00
|
|
|
|
2018-11-11 17:54:35 +01:00
|
|
|
func startCapture(config Config) {
|
2018-11-22 22:45:20 +01:00
|
|
|
|
2018-11-25 18:10:10 +01:00
|
|
|
list := NewCaptureList(config.MaxCaptures)
|
2018-11-22 22:45:20 +01:00
|
|
|
|
2019-03-10 16:05:21 +01:00
|
|
|
handler := NewRecorder(list, NewPlugin(NewProxyHandler(config.TargetURL)))
|
2018-11-25 22:24:59 +01:00
|
|
|
|
2018-12-01 22:45:41 +01:00
|
|
|
http.HandleFunc("/", handler)
|
2018-12-01 23:59:33 +01:00
|
|
|
http.HandleFunc(config.DashboardPath, NewDashboardHTMLHandler(config))
|
2018-12-01 22:45:41 +01:00
|
|
|
http.HandleFunc(config.DashboardConnPath, NewDashboardConnHandler(list))
|
|
|
|
http.HandleFunc(config.DashboardClearPath, NewDashboardClearHandler(list))
|
|
|
|
http.HandleFunc(config.DashboardRetryPath, NewDashboardRetryHandler(list, handler))
|
|
|
|
http.HandleFunc(config.DashboardItemInfoPath, NewDashboardItemInfoHandler(list))
|
2018-09-16 16:18:39 +02:00
|
|
|
|
2018-11-16 22:39:53 +01:00
|
|
|
captureHost := fmt.Sprintf("http://localhost:%s", config.ProxyPort)
|
2017-11-18 13:23:36 +01:00
|
|
|
|
2018-11-16 22:39:53 +01:00
|
|
|
fmt.Printf("\nListening on %s", captureHost)
|
|
|
|
fmt.Printf("\n %s/%s\n\n", captureHost, config.Dashboard)
|
2017-11-18 13:23:36 +01:00
|
|
|
|
2018-09-16 16:21:36 +02:00
|
|
|
fmt.Println(http.ListenAndServe(":"+config.ProxyPort, nil))
|
2017-11-08 00:10:54 +01:00
|
|
|
}
|
|
|
|
|
2018-12-01 23:59:33 +01:00
|
|
|
// NewDashboardConnHandler opens an event stream connection with the dashboard
|
|
|
|
// so that it is notified everytime a new capture arrives
|
2018-12-01 22:45:41 +01:00
|
|
|
func NewDashboardConnHandler(list *CaptureList) http.HandlerFunc {
|
|
|
|
return func(rw http.ResponseWriter, req *http.Request) {
|
2018-11-25 18:10:10 +01:00
|
|
|
if _, ok := rw.(http.Flusher); !ok {
|
|
|
|
fmt.Printf("streaming not supported at %s\n", req.URL)
|
|
|
|
http.Error(rw, "streaming not supported", http.StatusInternalServerError)
|
|
|
|
return
|
|
|
|
}
|
|
|
|
rw.Header().Set("Content-Type", "text/event-stream")
|
|
|
|
rw.Header().Set("Cache-Control", "no-cache")
|
|
|
|
for {
|
|
|
|
jsn, _ := json.Marshal(list.ItemsAsMetadata())
|
|
|
|
fmt.Fprintf(rw, "event: captures\ndata: %s\n\n", jsn)
|
|
|
|
rw.(http.Flusher).Flush()
|
|
|
|
|
|
|
|
select {
|
2018-12-02 15:17:23 +01:00
|
|
|
case <-list.Updated():
|
2018-11-25 18:10:10 +01:00
|
|
|
case <-req.Context().Done():
|
|
|
|
return
|
|
|
|
}
|
|
|
|
}
|
2018-12-01 22:45:41 +01:00
|
|
|
}
|
2017-11-21 22:36:40 +01:00
|
|
|
}
|
|
|
|
|
2018-12-01 23:59:33 +01:00
|
|
|
// NewDashboardClearHandler clears all the captures
|
2018-12-01 22:45:41 +01:00
|
|
|
func NewDashboardClearHandler(list *CaptureList) http.HandlerFunc {
|
|
|
|
return func(rw http.ResponseWriter, req *http.Request) {
|
2018-11-25 18:10:10 +01:00
|
|
|
list.RemoveAll()
|
2018-11-16 22:39:53 +01:00
|
|
|
rw.WriteHeader(http.StatusOK)
|
2018-12-01 22:45:41 +01:00
|
|
|
}
|
2018-09-07 16:45:02 +02:00
|
|
|
}
|
|
|
|
|
2018-12-01 23:59:33 +01:00
|
|
|
// NewDashboardHTMLHandler returns the dashboard html page
|
|
|
|
func NewDashboardHTMLHandler(config Config) http.HandlerFunc {
|
2018-12-01 22:45:41 +01:00
|
|
|
return func(rw http.ResponseWriter, req *http.Request) {
|
2018-11-16 22:39:53 +01:00
|
|
|
rw.Header().Add("Content-Type", "text/html")
|
2018-11-25 18:10:10 +01:00
|
|
|
t, err := template.New("dashboard template").Delims("<<", ">>").Parse(dashboardHTML)
|
|
|
|
if err != nil {
|
|
|
|
msg := fmt.Sprintf("could not parse dashboard html template: %v", err)
|
|
|
|
fmt.Println(msg)
|
|
|
|
http.Error(rw, msg, http.StatusInternalServerError)
|
|
|
|
return
|
|
|
|
}
|
|
|
|
t.Execute(rw, config)
|
2018-12-01 22:45:41 +01:00
|
|
|
}
|
2018-08-04 01:53:54 +02:00
|
|
|
}
|
|
|
|
|
2018-12-01 23:59:33 +01:00
|
|
|
// NewDashboardRetryHandler retries a request
|
2018-12-01 22:45:41 +01:00
|
|
|
func NewDashboardRetryHandler(list *CaptureList, next http.HandlerFunc) http.HandlerFunc {
|
|
|
|
return func(rw http.ResponseWriter, req *http.Request) {
|
2018-11-25 22:24:59 +01:00
|
|
|
id := req.URL.Path[strings.LastIndex(req.URL.Path, "/")+1:]
|
|
|
|
capture := list.Find(id)
|
|
|
|
if capture == nil {
|
|
|
|
http.Error(rw, "Item Not Found", http.StatusNotFound)
|
|
|
|
return
|
|
|
|
}
|
|
|
|
var reqBody []byte
|
|
|
|
capture.Req.Body, reqBody = drain(capture.Req.Body)
|
2018-12-02 17:57:32 +01:00
|
|
|
r, _ := http.NewRequest(capture.Req.Method, capture.Req.URL.String(), bytes.NewReader(reqBody))
|
2018-11-25 22:24:59 +01:00
|
|
|
r.Header = capture.Req.Header
|
|
|
|
next.ServeHTTP(rw, r)
|
2018-12-01 22:45:41 +01:00
|
|
|
}
|
2018-11-25 22:24:59 +01:00
|
|
|
}
|
|
|
|
|
2018-12-01 23:59:33 +01:00
|
|
|
// NewDashboardItemInfoHandler returns the full capture info
|
2018-12-01 22:45:41 +01:00
|
|
|
func NewDashboardItemInfoHandler(list *CaptureList) http.HandlerFunc {
|
|
|
|
return func(rw http.ResponseWriter, req *http.Request) {
|
2018-11-22 22:45:20 +01:00
|
|
|
id := req.URL.Path[strings.LastIndex(req.URL.Path, "/")+1:]
|
2018-11-25 18:10:10 +01:00
|
|
|
capture := list.Find(id)
|
2018-11-22 22:45:20 +01:00
|
|
|
if capture == nil {
|
|
|
|
http.Error(rw, "Item Not Found", http.StatusNotFound)
|
|
|
|
return
|
2018-09-16 16:18:39 +02:00
|
|
|
}
|
2018-11-22 22:45:20 +01:00
|
|
|
rw.Header().Add("Content-Type", "application/json")
|
2018-11-24 15:24:16 +01:00
|
|
|
json.NewEncoder(rw).Encode(dump(capture))
|
2018-12-01 22:45:41 +01:00
|
|
|
}
|
2018-08-02 00:36:23 +02:00
|
|
|
}
|
|
|
|
|
2019-03-10 16:15:15 +01:00
|
|
|
// NewPlugin setups plugins handlers for requests and resposes
|
2018-12-01 22:45:41 +01:00
|
|
|
func NewPlugin(next http.HandlerFunc) http.HandlerFunc {
|
2019-03-10 16:15:15 +01:00
|
|
|
files, err := ioutil.ReadDir(".")
|
2018-11-24 11:03:16 +01:00
|
|
|
if err != nil {
|
2019-03-10 16:15:15 +01:00
|
|
|
fmt.Println("error: could not read directory:", err)
|
2018-11-24 11:03:16 +01:00
|
|
|
return next
|
|
|
|
}
|
2019-03-10 16:15:15 +01:00
|
|
|
for _, file := range files {
|
|
|
|
if file.IsDir() {
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
if strings.HasSuffix(file.Name(), ".so") {
|
|
|
|
fmt.Printf("loading plugin '%s'\n", file.Name())
|
|
|
|
p, err := plugin.Open(file.Name())
|
|
|
|
if err != nil {
|
|
|
|
fmt.Println("error: could not open plugin:", err)
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
fn, err := p.Lookup("Handler")
|
|
|
|
if err != nil {
|
|
|
|
fmt.Println("error: could not find plugin Handler function:", err)
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
pluginHandler, ok := fn.(func(http.HandlerFunc) http.HandlerFunc)
|
|
|
|
if !ok {
|
|
|
|
fmt.Println("error: plugin Handler function should be 'func(http.HandlerFunc) http.HandlerFunc'")
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
next = pluginHandler(next)
|
|
|
|
}
|
2018-11-24 11:03:16 +01:00
|
|
|
}
|
2019-03-10 16:15:15 +01:00
|
|
|
return next
|
2018-11-24 11:03:16 +01:00
|
|
|
}
|
|
|
|
|
2018-12-01 23:59:33 +01:00
|
|
|
// NewRecorder saves all the traffic data
|
2018-12-01 22:45:41 +01:00
|
|
|
func NewRecorder(list *CaptureList, next http.HandlerFunc) http.HandlerFunc {
|
|
|
|
return func(rw http.ResponseWriter, req *http.Request) {
|
2018-11-24 15:24:16 +01:00
|
|
|
|
|
|
|
// save req body for later
|
|
|
|
var reqBody []byte
|
|
|
|
req.Body, reqBody = drain(req.Body)
|
2017-11-08 00:10:54 +01:00
|
|
|
|
2018-11-22 22:45:20 +01:00
|
|
|
rec := httptest.NewRecorder()
|
|
|
|
|
|
|
|
next.ServeHTTP(rec, req)
|
|
|
|
|
2018-11-24 15:24:16 +01:00
|
|
|
// respond
|
2018-11-22 22:45:20 +01:00
|
|
|
for k, v := range rec.HeaderMap {
|
|
|
|
rw.Header()[k] = v
|
|
|
|
}
|
|
|
|
rw.WriteHeader(rec.Code)
|
|
|
|
rw.Write(rec.Body.Bytes())
|
|
|
|
|
2018-11-24 15:24:16 +01:00
|
|
|
// record req and res
|
|
|
|
req.Body = ioutil.NopCloser(bytes.NewReader(reqBody))
|
2018-11-22 22:45:20 +01:00
|
|
|
res := rec.Result()
|
2018-11-25 18:10:10 +01:00
|
|
|
list.Insert(Capture{Req: req, Res: res})
|
2018-12-01 22:45:41 +01:00
|
|
|
}
|
2018-11-22 22:45:20 +01:00
|
|
|
}
|
|
|
|
|
2018-12-01 23:59:33 +01:00
|
|
|
// NewProxyHandler is the reverse proxy handler
|
2018-12-01 22:45:41 +01:00
|
|
|
func NewProxyHandler(URL string) http.HandlerFunc {
|
2018-11-22 22:45:20 +01:00
|
|
|
url, _ := url.Parse(URL)
|
|
|
|
proxy := httputil.NewSingleHostReverseProxy(url)
|
|
|
|
proxy.ErrorHandler = func(rw http.ResponseWriter, req *http.Request, err error) {
|
|
|
|
fmt.Printf("uh oh | %v | %s %s\n", err, req.Method, req.URL)
|
2018-11-29 20:52:14 +01:00
|
|
|
rw.WriteHeader(StatusInternalProxyError)
|
|
|
|
fmt.Fprintf(rw, "%v", err)
|
2018-11-22 22:45:20 +01:00
|
|
|
}
|
2018-12-01 22:45:41 +01:00
|
|
|
return func(rw http.ResponseWriter, req *http.Request) {
|
2018-11-22 22:45:20 +01:00
|
|
|
req.Host = url.Host
|
|
|
|
req.URL.Host = url.Host
|
|
|
|
req.URL.Scheme = url.Scheme
|
2018-11-11 17:54:35 +01:00
|
|
|
proxy.ServeHTTP(rw, req)
|
2018-12-01 22:45:41 +01:00
|
|
|
}
|
2018-08-04 01:53:54 +02:00
|
|
|
}
|
|
|
|
|
2018-11-24 15:24:16 +01:00
|
|
|
func dump(c *Capture) CaptureDump {
|
|
|
|
reqDump, err := dumpRequest(c.Req)
|
|
|
|
if err != nil {
|
|
|
|
fmt.Printf("could not dump request: %v\n", err)
|
|
|
|
}
|
|
|
|
resDump, err := dumpResponse(c.Res)
|
|
|
|
if err != nil {
|
|
|
|
fmt.Printf("could not dump response: %v\n", err)
|
|
|
|
}
|
2018-11-24 18:51:33 +01:00
|
|
|
strcurl, err := curl.New(c.Req)
|
|
|
|
if err != nil {
|
|
|
|
fmt.Printf("could not convert request to curl: %v\n", err)
|
|
|
|
}
|
|
|
|
return CaptureDump{Request: string(reqDump), Response: string(resDump), Curl: strcurl}
|
2018-11-24 15:24:16 +01:00
|
|
|
}
|
|
|
|
|
2018-08-04 01:53:54 +02:00
|
|
|
func dumpRequest(req *http.Request) ([]byte, error) {
|
2018-11-11 17:54:35 +01:00
|
|
|
if req.Header.Get("Content-Encoding") == "gzip" {
|
2018-12-01 23:59:33 +01:00
|
|
|
return dumpGzipRequest(req)
|
2018-11-11 17:54:35 +01:00
|
|
|
}
|
2018-07-21 19:50:53 +02:00
|
|
|
return httputil.DumpRequest(req, true)
|
|
|
|
}
|
|
|
|
|
2018-12-01 23:59:33 +01:00
|
|
|
func dumpGzipRequest(req *http.Request) ([]byte, error) {
|
|
|
|
var reqBody []byte
|
|
|
|
req.Body, reqBody = drain(req.Body)
|
|
|
|
reader, _ := gzip.NewReader(bytes.NewReader(reqBody))
|
|
|
|
req.Body = ioutil.NopCloser(reader)
|
|
|
|
reqDump, err := httputil.DumpRequest(req, true)
|
|
|
|
req.Body = ioutil.NopCloser(bytes.NewReader(reqBody))
|
|
|
|
return reqDump, err
|
|
|
|
}
|
|
|
|
|
2018-08-04 01:53:54 +02:00
|
|
|
func dumpResponse(res *http.Response) ([]byte, error) {
|
2018-11-29 20:52:14 +01:00
|
|
|
if res.StatusCode == StatusInternalProxyError {
|
2018-12-01 23:59:33 +01:00
|
|
|
return dumpInternalProxyError(res)
|
2018-11-29 20:52:14 +01:00
|
|
|
}
|
2017-11-15 15:37:54 +01:00
|
|
|
if res.Header.Get("Content-Encoding") == "gzip" {
|
2018-12-01 23:59:33 +01:00
|
|
|
return dumpGzipResponse(res)
|
2017-11-15 15:37:54 +01:00
|
|
|
}
|
2018-11-11 17:54:35 +01:00
|
|
|
return httputil.DumpResponse(res, true)
|
2017-11-15 15:37:54 +01:00
|
|
|
}
|
2018-09-07 16:45:02 +02:00
|
|
|
|
2018-12-01 23:59:33 +01:00
|
|
|
// Dumps only the body when we have an proxy error.
|
|
|
|
// This body is set in NewProxyHandler() in proxy.ErrorHandler
|
|
|
|
func dumpInternalProxyError(res *http.Response) ([]byte, error) {
|
|
|
|
var resBody []byte
|
|
|
|
res.Body, resBody = drain(res.Body)
|
|
|
|
return resBody, nil
|
|
|
|
}
|
|
|
|
|
|
|
|
func dumpGzipResponse(res *http.Response) ([]byte, error) {
|
|
|
|
var resBody []byte
|
|
|
|
res.Body, resBody = drain(res.Body)
|
|
|
|
reader, _ := gzip.NewReader(bytes.NewReader(resBody))
|
|
|
|
res.Body = ioutil.NopCloser(reader)
|
|
|
|
resDump, err := httputil.DumpResponse(res, true)
|
|
|
|
res.Body = ioutil.NopCloser(bytes.NewReader(resBody))
|
|
|
|
return resDump, err
|
|
|
|
}
|
|
|
|
|
2018-11-22 22:45:20 +01:00
|
|
|
func drain(b io.ReadCloser) (io.ReadCloser, []byte) {
|
|
|
|
all, _ := ioutil.ReadAll(b)
|
|
|
|
b.Close()
|
|
|
|
return ioutil.NopCloser(bytes.NewReader(all)), all
|
|
|
|
}
|