author     Emiliano Ciavatta  2020-09-16 15:49:50 +0000
committer  Emiliano Ciavatta  2020-09-16 15:49:50 +0000
commit     dfd6d543074b4a30c2fc990063ca69ebf8a734e1 (patch)
tree       a022ad9861b35c38ad1d0945d79470adfe26ae52 /parsers
parent     ec949ffea86a14526a7142d048022a4a07f684ff (diff)
Fix body decoding bugs. Improve frontend
Diffstat (limited to 'parsers')
-rw-r--r--  parsers/http_request_parser.go   21
-rw-r--r--  parsers/http_response_parser.go  31
2 files changed, 31 insertions, 21 deletions
diff --git a/parsers/http_request_parser.go b/parsers/http_request_parser.go
index cfac196..e2224b8 100644
--- a/parsers/http_request_parser.go
+++ b/parsers/http_request_parser.go
@@ -4,6 +4,7 @@ import (
"bufio"
"bytes"
"encoding/json"
+ log "github.com/sirupsen/logrus"
"io/ioutil"
"moul.io/http2curl"
"net/http"
@@ -41,12 +42,13 @@ func (p HttpRequestParser) TryParse(content []byte) Metadata {
return nil
}
var body string
- if request.Body != nil {
- if buffer, err := ioutil.ReadAll(request.Body); err == nil {
- body = string(buffer)
- }
- _ = request.Body.Close()
+ if buffer, err := ioutil.ReadAll(request.Body); err == nil {
+ body = string(buffer)
+ } else {
+ log.WithError(err).Error("failed to read body in http_request_parser")
+ return nil
}
+ _ = request.Body.Close()
_ = request.ParseForm()
return HttpRequestMetadata{
@@ -62,18 +64,21 @@ func (p HttpRequestParser) TryParse(content []byte) Metadata {
Body: body,
Trailer: JoinArrayMap(request.Trailer),
Reproducers: HttpRequestMetadataReproducers{
- CurlCommand: curlCommand(request),
+ CurlCommand: curlCommand(content),
RequestsCode: requestsCode(request),
FetchRequest: fetchRequest(request, body),
},
}
}
-func curlCommand(request *http.Request) string {
+func curlCommand(content []byte) string {
+ // a new reader is required because the whole body has already been read and GetBody() doesn't work
+ reader := bufio.NewReader(bytes.NewReader(content))
+ request, _ := http.ReadRequest(reader)
if command, err := http2curl.GetCurlCommand(request); err == nil {
return command.String()
} else {
- return "invalid-request"
+ return err.Error()
}
}
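
The curlCommand change passes the raw content instead of the already-parsed request: by this point ioutil.ReadAll has drained request.Body, and requests produced by http.ReadRequest have no usable GetBody, so http2curl would otherwise see an empty payload. A minimal standalone sketch of the same idea (the sample request bytes and the main wrapper are invented for illustration; only http.ReadRequest and http2curl.GetCurlCommand come from the patch above):

package main

import (
    "bufio"
    "bytes"
    "fmt"
    "net/http"

    "moul.io/http2curl"
)

// rawRequest is an invented sample payload; the real parser receives the
// reassembled stream bytes as `content`.
var rawRequest = []byte("POST /login HTTP/1.1\r\n" +
    "Host: example.com\r\n" +
    "Content-Type: application/x-www-form-urlencoded\r\n" +
    "Content-Length: 9\r\n" +
    "\r\n" +
    "user=test")

func main() {
    // Requests parsed with http.ReadRequest have no GetBody, so once Body is
    // drained it cannot be replayed; re-parsing the raw bytes yields a request
    // whose Body is still unread.
    request, err := http.ReadRequest(bufio.NewReader(bytes.NewReader(rawRequest)))
    if err != nil {
        panic(err)
    }

    // With an intact body, the generated curl command includes the payload.
    command, err := http2curl.GetCurlCommand(request)
    if err != nil {
        fmt.Println(err)
        return
    }
    fmt.Println(command)
}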
diff --git a/parsers/http_response_parser.go b/parsers/http_response_parser.go
index a639dec..1770116 100644
--- a/parsers/http_response_parser.go
+++ b/parsers/http_response_parser.go
@@ -4,6 +4,7 @@ import (
"bufio"
"bytes"
"compress/gzip"
+ log "github.com/sirupsen/logrus"
"io/ioutil"
"net/http"
)
@@ -33,23 +34,27 @@ func (p HttpResponseParser) TryParse(content []byte) Metadata {
}
var body string
var compressed bool
- if response.Body != nil {
- switch response.Header.Get("Content-Encoding") {
- case "gzip":
- if gzipReader, err := gzip.NewReader(response.Body); err == nil {
- if buffer, err := ioutil.ReadAll(gzipReader); err == nil {
- body = string(buffer)
- compressed = true
- }
- _ = gzipReader.Close()
- }
- default:
- if buffer, err := ioutil.ReadAll(response.Body); err == nil {
+ switch response.Header.Get("Content-Encoding") {
+ case "gzip":
+ if gzipReader, err := gzip.NewReader(response.Body); err == nil {
+ if buffer, err := ioutil.ReadAll(gzipReader); err == nil {
body = string(buffer)
+ compressed = true
+ } else {
+ log.WithError(err).Error("failed to read gzipped body in http_response_parser")
+ return nil
}
+ _ = gzipReader.Close()
+ }
+ default:
+ if buffer, err := ioutil.ReadAll(response.Body); err == nil {
+ body = string(buffer)
+ } else {
+ log.WithError(err).Error("failed to read body in http_response_parser")
+ return nil
}
- _ = response.Body.Close()
}
+ _ = response.Body.Close()
var location string
if locationUrl, err := response.Location(); err == nil {
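
For the decompression branch above, a self-contained sketch of the same decoding flow in isolation (the buildGzipResponse helper and sample payload are invented for this example; the real parser reads the reassembled stream bytes and returns metadata instead of printing):

package main

import (
    "bufio"
    "bytes"
    "compress/gzip"
    "fmt"
    "io/ioutil"
    "net/http"
)

func main() {
    raw := buildGzipResponse("hello from the server")

    // Go does not transparently decompress bodies read via http.ReadResponse,
    // so Content-Encoding must be handled explicitly, as in TryParse.
    response, err := http.ReadResponse(bufio.NewReader(bytes.NewReader(raw)), nil)
    if err != nil {
        panic(err)
    }
    defer response.Body.Close()

    var body string
    switch response.Header.Get("Content-Encoding") {
    case "gzip":
        // Wrap the body in a gzip reader and read it fully, mirroring the
        // "gzip" branch of HttpResponseParser.TryParse.
        gzipReader, err := gzip.NewReader(response.Body)
        if err != nil {
            panic(err)
        }
        buffer, err := ioutil.ReadAll(gzipReader)
        if err != nil {
            panic(err)
        }
        _ = gzipReader.Close()
        body = string(buffer)
    default:
        buffer, err := ioutil.ReadAll(response.Body)
        if err != nil {
            panic(err)
        }
        body = string(buffer)
    }
    fmt.Println(body)
}

// buildGzipResponse is a hypothetical helper used only to produce example
// input: an HTTP/1.1 response whose body is gzip-compressed.
func buildGzipResponse(payload string) []byte {
    var compressed bytes.Buffer
    w := gzip.NewWriter(&compressed)
    _, _ = w.Write([]byte(payload))
    _ = w.Close()

    var out bytes.Buffer
    out.WriteString("HTTP/1.1 200 OK\r\n")
    out.WriteString("Content-Encoding: gzip\r\n")
    out.WriteString(fmt.Sprintf("Content-Length: %d\r\n", compressed.Len()))
    out.WriteString("\r\n")
    out.Write(compressed.Bytes())
    return out.Bytes()
}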