From 1bbb63b34e593267db4332e6b9b276289e4685c4 Mon Sep 17 00:00:00 2001
From: therealbobo
Date: Tue, 15 Sep 2020 12:57:21 +0200
Subject: multistage build

---
 Dockerfile | 26 ++++++++++++++++++++++----
 1 file changed, 22 insertions(+), 4 deletions(-)

diff --git a/Dockerfile b/Dockerfile
index 0092628..07f275e 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,11 +1,12 @@
-FROM ubuntu:20.04
+# BUILD STAGE
+FROM ubuntu:20.04 AS BUILDSTAGE
 
 # Install tools and libraries
-RUN apt-get update && DEBIAN_FRONTEND=noninteractive apt-get install -qq git golang-1.14 pkg-config libpcap-dev libhyperscan-dev yarnpkg
+RUN apt-get update && \
+    DEBIAN_FRONTEND=noninteractive apt-get install -qq git golang-1.14 pkg-config libpcap-dev libhyperscan-dev yarnpkg curl
 
 RUN ln -sf ../lib/go-1.14/bin/go /usr/bin/go
 
-ENV GIN_MODE release
 
 COPY . /caronte
 
@@ -13,6 +14,23 @@ WORKDIR /caronte
 
 RUN go mod download && go build
 
-RUN cd frontend && yarnpkg install && yarnpkg build
+RUN cd frontend && \
+    yarnpkg install && \
+    yarnpkg build --production=true
+RUN curl -sf https://gobinaries.com/tj/node-prune | sh && cd /caronte/frontend && node-prune
+
+
+# LAST STAGE
+FROM ubuntu:20.04
+
+COPY --from=BUILDSTAGE /caronte /caronte
+
+RUN apt-get update && \
+    DEBIAN_FRONTEND=noninteractive apt-get install -qq libpcap-dev libhyperscan-dev && \
+    rm -rf /var/lib/apt/lists/*
+
+ENV GIN_MODE release
+
+WORKDIR /caronte
 
 CMD ./caronte
-- 
cgit v1.2.3-70-g09d2

From 5a6f17b6439e42e829a9489575400492bcd10655 Mon Sep 17 00:00:00 2001
From: therealbobo
Date: Tue, 15 Sep 2020 18:35:24 +0200
Subject: minor change

---
 Dockerfile | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/Dockerfile b/Dockerfile
index 07f275e..e3c49bc 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -23,7 +23,9 @@ RUN curl -sf https://gobinaries.com/tj/node-prune | sh && cd /caronte/frontend &
 # LAST STAGE
 FROM ubuntu:20.04
 
-COPY --from=BUILDSTAGE /caronte /caronte
+COPY --from=BUILDSTAGE /caronte/caronte /caronte/caronte
+COPY --from=BUILDSTAGE /caronte/frontend /caronte/frontend
+COPY --from=BUILDSTAGE /caronte/shared /caronte/shared
 
 RUN apt-get update && \
     DEBIAN_FRONTEND=noninteractive apt-get install -qq libpcap-dev libhyperscan-dev && \
-- 
cgit v1.2.3-70-g09d2

From 2954045cb28ea8cbf4dbd019355a2df8fed28ccc Mon Sep 17 00:00:00 2001
From: Emiliano Ciavatta
Date: Tue, 15 Sep 2020 23:17:06 +0200
Subject: Refactor gzip decoder, added parsers with reproducers

---
 connection_streams_controller.go |  59 ++++++++++------
 go.mod                           |   1 +
 go.sum                           |   2 +
 parsers/http_request_parser.go   | 144 +++++++++++++++++++++++++++++++++++++++
 parsers/http_response_parser.go  |  72 ++++++++++++++++++++
 parsers/parser.go                |  28 ++++++++
 parsers/parser_utils.go          |  24 +++++++
 utils.go                         |  82 ----------------------
 8 files changed, 310 insertions(+), 102 deletions(-)
 create mode 100644 parsers/http_request_parser.go
 create mode 100644 parsers/http_response_parser.go
 create mode 100644 parsers/parser.go
 create mode 100644 parsers/parser_utils.go

diff --git a/connection_streams_controller.go b/connection_streams_controller.go
index 096210e..3ba30f8 100644
--- a/connection_streams_controller.go
+++ b/connection_streams_controller.go
@@ -1,7 +1,9 @@
 package main
 
 import (
+    "bytes"
     "context"
+    "github.com/eciavatta/caronte/parsers"
     log "github.com/sirupsen/logrus"
     "time"
 )
@@ -25,13 +27,13 @@ type ConnectionStream struct {
 type PatternSlice [2]uint64
 
 type Payload struct {
-    FromClient      bool         `json:"from_client"`
-    Content         string       `json:"content"`
-    DecodedContent  string       `json:"decoded_content"`
-    Index           int          `json:"index"`
-    Timestamp       time.Time    `json:"timestamp"`
-    IsRetransmitted bool         `json:"is_retransmitted"`
-    RegexMatches    []RegexSlice `json:"regex_matches"`
+    FromClient      bool             `json:"from_client"`
+    Content         string           `json:"content"`
+    Metadata        parsers.Metadata `json:"metadata"`
+    Index           int              `json:"index"`
+    Timestamp       time.Time        `json:"timestamp"`
+    IsRetransmitted bool             `json:"is_retransmitted"`
+    RegexMatches    []RegexSlice     `json:"regex_matches"`
 }
 
 type RegexSlice struct {
@@ -56,8 +58,8 @@ func NewConnectionStreamsController(storage Storage) ConnectionStreamsController
 }
 
 func (csc ConnectionStreamsController) GetConnectionPayload(c context.Context, connectionID RowID,
-    format QueryFormat) []Payload {
-    payloads := make([]Payload, 0, InitialPayloadsSize)
+    format QueryFormat) []*Payload {
+    payloads := make([]*Payload, 0, InitialPayloadsSize)
     var clientIndex, serverIndex, globalIndex uint64
 
     if format.Limit <= 0 {
@@ -76,7 +78,11 @@ func (csc ConnectionStreamsController) GetConnectionPayload(c context.Context, c
         return serverBlocksIndex < len(serverStream.BlocksIndexes)
     }
 
-    var payload Payload
+    var payload *Payload
+    payloadsBuffer := make([]*Payload, 0, 16)
+    contentChunkBuffer := new(bytes.Buffer)
+    var lastContentSlice []byte
+    var sideChanged, lastClient, lastServer bool
     for !clientStream.ID.IsZero() || !serverStream.ID.IsZero() {
         if hasClientBlocks() && (!hasServerBlocks() || // next payload is from client
             clientStream.BlocksTimestamps[clientBlocksIndex].UnixNano() <=
@@ -90,10 +96,9 @@ func (csc ConnectionStreamsController) GetConnectionPayload(c context.Context, c
             }
             size := uint64(end - start)
 
-            payload = Payload{
+            payload = &Payload{
                 FromClient:      true,
                 Content:         DecodeBytes(clientStream.Payload[start:end], format.Format),
-                //Request: ReadRequest(content),
                 Index:           start,
                 Timestamp:       clientStream.BlocksTimestamps[clientBlocksIndex],
                 IsRetransmitted: clientStream.BlocksLoss[clientBlocksIndex],
@@ -102,6 +107,9 @@ func (csc ConnectionStreamsController) GetConnectionPayload(c context.Context, c
             clientIndex += size
             globalIndex += size
             clientBlocksIndex++
+
+            lastContentSlice = clientStream.Payload[start:end]
+            sideChanged, lastClient, lastServer = lastServer, true, false
         } else { // next payload is from server
             start := serverStream.BlocksIndexes[serverBlocksIndex]
             end := 0
@@ -112,15 +120,9 @@ func (csc ConnectionStreamsController) GetConnectionPayload(c context.Context, c
             }
             size := uint64(end - start)
 
-            content := DecodeBytes(serverStream.Payload[start:end], format.Format)
-
-            plainContent := DecodeBytes(serverStream.Payload[start:end], "default")
-            decodedContent := DecodeBytes([]byte(DecodeHttpResponse(plainContent)), format.Format)
-
-            payload = Payload{
+            payload = &Payload{
                 FromClient:      false,
-                Content:         content,
-                DecodedContent:  decodedContent,
+                Content:         DecodeBytes(serverStream.Payload[start:end], format.Format),
                 Index:           start,
                 Timestamp:       serverStream.BlocksTimestamps[serverBlocksIndex],
                 IsRetransmitted: serverStream.BlocksLoss[serverBlocksIndex],
@@ -129,12 +131,29 @@ func (csc ConnectionStreamsController) GetConnectionPayload(c context.Context, c
             serverIndex += size
             globalIndex += size
             serverBlocksIndex++
+
+            lastContentSlice = serverStream.Payload[start:end]
+            sideChanged, lastClient, lastServer = lastClient, false, true
+        }
+
+        if sideChanged {
+            metadata := parsers.Parse(contentChunkBuffer.Bytes())
+            for _, elem := range payloadsBuffer {
+                elem.Metadata = metadata
+            }
+
+            payloadsBuffer = payloadsBuffer[:0]
+            contentChunkBuffer.Reset()
         }
+        payloadsBuffer = append(payloadsBuffer, payload)
+        contentChunkBuffer.Write(lastContentSlice)
 
         if globalIndex > format.Skip {
+            // problem: waste of time if the payload is discarded
             payloads = append(payloads, payload)
         }
 
         if globalIndex > format.Skip+format.Limit {
+            // problem: the last chunk is not parsed, but can be ok because it is not finished
             return payloads
         }
diff --git a/go.mod b/go.mod
index 1281ae8..308b16b 100644
--- a/go.mod
+++ b/go.mod
@@ -17,4 +17,5 @@ require (
     go.mongodb.org/mongo-driver v1.3.1
     golang.org/x/net v0.0.0-20190620200207-3b0461eec859 // indirect
     golang.org/x/sys v0.0.0-20200406155108-e3b113bbe6a4 // indirect
+    moul.io/http2curl v1.0.0
 )
diff --git a/go.sum b/go.sum
index d17dea6..fd63c39 100644
--- a/go.sum
+++ b/go.sum
@@ -179,3 +179,5 @@ gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI=
 gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
 gopkg.in/yaml.v2 v2.2.8 h1:obN1ZagJSUGI0Ek/LBmuj4SNLPfIny3KsKFopxRdj10=
 gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
+moul.io/http2curl v1.0.0 h1:6XwpyZOYsgZJrU8exnG87ncVkU1FVCcTRpwzOkTDUi8=
+moul.io/http2curl v1.0.0/go.mod h1:f6cULg+e4Md/oW1cYmwW4IWQOVl2lGbmCNGOHvzX2kE=
diff --git a/parsers/http_request_parser.go b/parsers/http_request_parser.go
new file mode 100644
index 0000000..d204d4c
--- /dev/null
+++ b/parsers/http_request_parser.go
@@ -0,0 +1,144 @@
+package parsers
+
+import (
+    "bufio"
+    "bytes"
+    "encoding/json"
+    "io/ioutil"
+    "moul.io/http2curl"
+    "net/http"
+    "strings"
+)
+
+type HttpRequestMetadata struct {
+    BasicMetadata
+    Method        string            `json:"method"`
+    URL           string            `json:"url"`
+    Protocol      string            `json:"protocol"`
+    Host          string            `json:"host"`
+    Headers       map[string]string `json:"headers"`
+    Cookies       map[string]string `json:"cookies" binding:"omitempty"`
+    ContentLength int64             `json:"content_length"`
+    FormData      map[string]string `json:"form_data" binding:"omitempty"`
+    Body          string            `json:"body" binding:"omitempty"`
+    Trailer       map[string]string `json:"trailer" binding:"omitempty"`
+    Reproducers   HttpRequestMetadataReproducers `json:"reproducers"`
+}
+
+type HttpRequestMetadataReproducers struct {
+    CurlCommand  string `json:"curl_command"`
+    RequestsCode string `json:"requests_code"`
+    FetchRequest string `json:"fetch_request"`
+}
+
+type HttpRequestParser struct {
+}
+
+func (p HttpRequestParser) TryParse(content []byte) Metadata {
+    reader := bufio.NewReader(bytes.NewReader(content))
+    request, err := http.ReadRequest(reader)
+    if err != nil {
+        return nil
+    }
+    var body string
+    if request.Body != nil {
+        if buffer, err := ioutil.ReadAll(request.Body); err == nil {
+            body = string(buffer)
+        }
+        _ = request.Body.Close()
+    }
+    _ = request.ParseForm()
+
+    return HttpRequestMetadata{
+        BasicMetadata: BasicMetadata{"http-request"},
+        Method:        request.Method,
+        URL:           request.URL.String(),
+        Protocol:      request.Proto,
+        Host:          request.Host,
+        Headers:       JoinArrayMap(request.Header),
+        Cookies:       CookiesMap(request.Cookies()),
+        ContentLength: request.ContentLength,
+        FormData:      JoinArrayMap(request.Form),
+        Body:          body,
+        Trailer:       JoinArrayMap(request.Trailer),
+        Reproducers: HttpRequestMetadataReproducers{
+            CurlCommand:  curlCommand(request),
+            RequestsCode: requestsCode(request),
+            FetchRequest: fetchRequest(request, body),
+        },
+    }
+}
+
+func curlCommand(request *http.Request) string {
+    if command, err := http2curl.GetCurlCommand(request); err == nil {
+        return command.String()
+    } else {
+        return "invalid-request"
+    }
+}
+
+func requestsCode(request *http.Request) string {
+    var b strings.Builder
+    var params string
+    if request.Form != nil {
+        params = toJson(JoinArrayMap(request.PostForm))
+    }
+    headers := toJson(JoinArrayMap(request.Header))
+    cookies := toJson(CookiesMap(request.Cookies()))
+
+    b.WriteString("import requests\n\nresponse = requests." + strings.ToLower(request.Method) + "(")
+    b.WriteString("\"" + request.URL.String() + "\"")
+    if params != "" {
+        b.WriteString(", data = " + params)
+    }
+    if headers != "" {
+        b.WriteString(", headers = " + headers)
+    }
+    if cookies != "" {
+        b.WriteString(", cookies = " + cookies)
+    }
+    b.WriteString(")\n")
+    b.WriteString(`
+# print(response.url)
+# print(response.text)
+# print(response.content)
+# print(response.json())
+# print(response.raw)
+# print(response.status_code)
+# print(response.cookies)
+# print(response.history)
+`)
+
+    return b.String()
+}
+
+func fetchRequest(request *http.Request, body string) string {
+    headers := JoinArrayMap(request.Header)
+    data := make(map[string]interface{})
+    data["headers"] = headers
+    if referrer := request.Header.Get("referrer"); referrer != "" {
+        data["Referrer"] = referrer
+    }
+    // TODO: referrerPolicy
+    if body == "" {
+        data["body"] = nil
+    } else {
+        data["body"] = body
+    }
+    data["method"] = request.Method
+    // TODO: mode
+
+    if jsonData := toJson(data); jsonData != "" {
+        return "fetch(\"" + request.URL.String() + "\", " + jsonData + ");"
+    } else {
+        return "invalid-request"
+    }
+}
+
+func toJson(obj interface{}) string {
+    if buffer, err := json.Marshal(obj); err == nil {
+        return string(buffer)
+    } else {
+        return ""
+    }
+}
diff --git a/parsers/http_response_parser.go b/parsers/http_response_parser.go
new file mode 100644
index 0000000..a639dec
--- /dev/null
+++ b/parsers/http_response_parser.go
@@ -0,0 +1,72 @@
+package parsers
+
+import (
+    "bufio"
+    "bytes"
+    "compress/gzip"
+    "io/ioutil"
+    "net/http"
+)
+
+type HttpResponseMetadata struct {
+    BasicMetadata
+    Status           string            `json:"status"`
+    StatusCode       int               `json:"status_code"`
+    Protocol         string            `json:"protocol"`
+    Headers          map[string]string `json:"headers"`
+    ConnectionClosed bool              `json:"connection_closed"`
+    Cookies          map[string]string `json:"cookies" binding:"omitempty"`
+    Location         string            `json:"location" binding:"omitempty"`
+    Compressed       bool              `json:"compressed"`
+    Body             string            `json:"body" binding:"omitempty"`
+    Trailer          map[string]string `json:"trailer" binding:"omitempty"`
+}
+
+type HttpResponseParser struct {
+}
+
+func (p HttpResponseParser) TryParse(content []byte) Metadata {
+    reader := bufio.NewReader(bytes.NewReader(content))
+    response, err := http.ReadResponse(reader, nil)
+    if err != nil {
+        return nil
+    }
+    var body string
+    var compressed bool
+    if response.Body != nil {
+        switch response.Header.Get("Content-Encoding") {
+        case "gzip":
+            if gzipReader, err := gzip.NewReader(response.Body); err == nil {
+                if buffer, err := ioutil.ReadAll(gzipReader); err == nil {
+                    body = string(buffer)
+                    compressed = true
+                }
+                _ = gzipReader.Close()
+            }
+        default:
+            if buffer, err := ioutil.ReadAll(response.Body); err == nil {
+                body = string(buffer)
+            }
+        }
+        _ = response.Body.Close()
+    }
+
+    var location string
+    if locationUrl, err := response.Location(); err == nil {
+        location = locationUrl.String()
+    }
+
+    return HttpResponseMetadata{
+        BasicMetadata:    BasicMetadata{"http-response"},
+        Status:           response.Status,
+        StatusCode:       response.StatusCode,
+        Protocol:         response.Proto,
+        Headers:          JoinArrayMap(response.Header),
+        ConnectionClosed: response.Close,
+        Cookies:          CookiesMap(response.Cookies()),
+        Location:         location,
+        Compressed:       compressed,
+        Body:             body,
+        Trailer:          JoinArrayMap(response.Trailer),
+    }
+}
diff --git a/parsers/parser.go b/parsers/parser.go
new file mode 100644
index 0000000..06cc0dc
--- /dev/null
+++ b/parsers/parser.go
@@ -0,0 +1,28 @@
+package parsers
+
+type Parser interface {
+    TryParse(content []byte) Metadata
+
+}
+
+type Metadata interface {
+}
+
+type BasicMetadata struct {
+    Type string `json:"type"`
+}
+
+var parsers = []Parser{ // order matter
+    HttpRequestParser{},
+    HttpResponseParser{},
+}
+
+func Parse(content []byte) Metadata {
+    for _, parser := range parsers {
+        if metadata := parser.TryParse(content); metadata != nil {
+            return metadata
+        }
+    }
+
+    return nil
+}
diff --git a/parsers/parser_utils.go b/parsers/parser_utils.go
new file mode 100644
index 0000000..b688262
--- /dev/null
+++ b/parsers/parser_utils.go
@@ -0,0 +1,24 @@
+package parsers
+
+import (
+    "net/http"
+    "strings"
+)
+
+func JoinArrayMap(obj map[string][]string) map[string]string {
+    headers := make(map[string]string, len(obj))
+    for key, value := range obj {
+        headers[key] = strings.Join(value, ";")
+    }
+
+    return headers
+}
+
+func CookiesMap(cookiesArray []*http.Cookie) map[string]string {
+    cookies := make(map[string]string, len(cookiesArray))
+    for _, cookie := range cookiesArray {
+        cookies[cookie.Name] = cookie.Value
+    }
+
+    return cookies
+}
diff --git a/utils.go b/utils.go
index b07244d..a14fdca 100644
--- a/utils.go
+++ b/utils.go
@@ -13,11 +13,6 @@ import (
     "net"
     "os"
     "time"
-    "net/http"
-    "bufio"
-    "strings"
-    "io/ioutil"
-    "compress/gzip"
 )
 
 func Sha256Sum(fileName string) (string, error) {
@@ -113,83 +108,6 @@ func DecodeBytes(buffer []byte, format string) string {
     }
 }
 
-func ReadRequest(raw string) http.Request {
-    reader := bufio.NewReader(strings.NewReader(raw))
-    req,err := http.ReadRequest(reader)
-    if err != nil{
-        log.Info("Reading request: ",req)
-        return http.Request{}
-    }
-    return *req
-}
-
-func GetHeader(raw string) string{
-    tmp := strings.Split(raw,"\r\n")
-    end := len(tmp)
-    for i, line := range tmp{
-        if line == ""{
-            end = i
-            break
-        }
-    }
-    return strings.Join(tmp[:end],"\r\n")
-}
-
-func GetBody(raw string) string{
-    tmp := strings.Split(raw,"\r\n")
-    start := 0
-    for i, line := range tmp{
-        if line == ""{
-            start = i + 2
-            break
-        }
-    }
-    return strings.Join(tmp[start:],"\r\n")
-}
-
-func DecodeHttpResponse(raw string) string {
-    body := []byte{}
-    reader := bufio.NewReader(strings.NewReader(raw))
-    resp,err := http.ReadResponse(reader, &http.Request{})
-    if err != nil{
-        log.Info("Reading response: ",resp)
-        return ""
-    }
-
-    defer resp.Body.Close()
-
-    if resp.StatusCode >= 200 && resp.StatusCode < 300 {
-        var bodyReader io.ReadCloser
-        switch resp.Header.Get("Content-Encoding") {
-        case "gzip":
-            bodyReader, err = gzip.NewReader(resp.Body)
-            if err != nil {
-                log.Error("Gunzipping body: ",err)
-            }
-            defer bodyReader.Close()
-            body, err = ioutil.ReadAll(bodyReader)
-            if err != nil{
-                log.Error("Reading gzipped body: ",err)
-                // if the response is malformed
-                // or the connection is closed
-                fallbackReader, _ := gzip.NewReader(strings.NewReader(GetBody(raw)))
-                body, err = ioutil.ReadAll(fallbackReader)
-                if err != nil{
-                    log.Error(string(body))
-                }
-            }
-        default:
-            bodyReader = resp.Body
-            body, err = ioutil.ReadAll(bodyReader)
-            if err != nil{
-                log.Error("Reading body: ",err)
-                body = []byte(GetBody(raw))
-            }
-        }
-    }
-    return GetHeader(raw) + "\r\n\r\n"+ string(body)
-}
-
 func CopyFile(dst, src string) error {
     in, err := os.Open(src)
     if err != nil {
-- 
cgit v1.2.3-70-g09d2

From ec949ffea86a14526a7142d048022a4a07f684ff Mon Sep 17 00:00:00 2001
From: Emiliano Ciavatta
Date: Wed, 16 Sep 2020 15:41:04 +0200
Subject: Improve frontend connection visualization

---
 connection_streams_controller.go               |  18 +--
 frontend/src/components/Connection.js          |   7 +-
 frontend/src/components/ConnectionContent.js   | 146 +++++++++++++++++--------
 frontend/src/components/ConnectionContent.scss | 101 ++++++++++++++---
 frontend/src/components/MessageAction.js       |  45 ++++++++
 frontend/src/components/MessageAction.scss     |  11 ++
 frontend/src/utils.js                          |   5 +
 parsers/http_request_parser.go                 |   2 +-
 8 files changed, 269 insertions(+), 66 deletions(-)
 create mode 100644 frontend/src/components/MessageAction.js
 create mode 100644 frontend/src/components/MessageAction.scss

diff --git a/connection_streams_controller.go b/connection_streams_controller.go
index 3ba30f8..c4876b1 100644
--- a/connection_streams_controller.go
+++ b/connection_streams_controller.go
@@ -27,13 +27,14 @@ type ConnectionStream struct {
 type PatternSlice [2]uint64
 
 type Payload struct {
-    FromClient      bool             `json:"from_client"`
-    Content         string           `json:"content"`
-    Metadata        parsers.Metadata `json:"metadata"`
-    Index           int              `json:"index"`
-    Timestamp       time.Time        `json:"timestamp"`
-    IsRetransmitted bool             `json:"is_retransmitted"`
-    RegexMatches    []RegexSlice     `json:"regex_matches"`
+    FromClient             bool             `json:"from_client"`
+    Content                string           `json:"content"`
+    Metadata               parsers.Metadata `json:"metadata"`
+    IsMetadataContinuation bool             `json:"is_metadata_continuation"`
+    Index                  int              `json:"index"`
+    Timestamp              time.Time        `json:"timestamp"`
+    IsRetransmitted        bool             `json:"is_retransmitted"`
+    RegexMatches           []RegexSlice     `json:"regex_matches"`
 }
 
 type RegexSlice struct {
@@ -138,8 +139,11 @@ func (csc ConnectionStreamsController) GetConnectionPayload(c context.Context, c
 
         if sideChanged {
             metadata := parsers.Parse(contentChunkBuffer.Bytes())
+            var isMetadataContinuation bool
             for _, elem := range payloadsBuffer {
                 elem.Metadata = metadata
+                elem.IsMetadataContinuation = isMetadataContinuation
+                isMetadataContinuation = true
             }
 
             payloadsBuffer = payloadsBuffer[:0]
diff --git a/frontend/src/components/Connection.js b/frontend/src/components/Connection.js
index e41f542..93c6438 100644
--- a/frontend/src/components/Connection.js
+++ b/frontend/src/components/Connection.js
@@ -57,6 +57,11 @@ class Connection extends Component {
         let closedAt = new Date(conn.closed_at);
         let processedAt = new Date(conn.processed_at);
         let duration = ((closedAt - startedAt) / 1000).toFixed(3);
+        if (duration > 1000 || duration < -1000) {
+            duration = "∞";
+        } else {
+            duration += "s";
+        }
 
         let timeInfo = <div>
{key}: {value}
+ ); + + let m = connectionMessage.metadata; + switch (m.type) { + case "http-request": + let url = {m.host}{m.url}; + return +{m.method} {url} {m.protocol}
+ {unrollMap(m.headers)} +{m.protocol} {m.status}
+ {unrollMap(m.headers)} +{payload}+ {this.state.messageActionDialog}
+ {this.props.actionValue} ++
{m.method} {url} {m.protocol}
+{m.method} {url} {m.protocol}
{unrollMap(m.headers)}{m.protocol} {m.status}
+{m.protocol} {m.status}
{unrollMap(m.headers)}- {this.props.actionValue} --