aboutsummaryrefslogtreecommitdiff
path: root/parsers
diff options
context:
space:
mode:
Diffstat (limited to 'parsers')
-rw-r--r--parsers/http_request_parser.go144
-rw-r--r--parsers/http_response_parser.go72
-rw-r--r--parsers/parser.go28
-rw-r--r--parsers/parser_utils.go24
4 files changed, 268 insertions, 0 deletions
diff --git a/parsers/http_request_parser.go b/parsers/http_request_parser.go
new file mode 100644
index 0000000..d204d4c
--- /dev/null
+++ b/parsers/http_request_parser.go
@@ -0,0 +1,144 @@
+package parsers
+
+import (
+ "bufio"
+ "bytes"
+ "encoding/json"
+ "io/ioutil"
+ "moul.io/http2curl"
+ "net/http"
+ "strings"
+)
+
// HttpRequestMetadata describes a parsed HTTP request: request line,
// headers, cookies, body and form data, plus ready-made reproducer
// snippets (curl / Python requests / JavaScript fetch).
// Produced by HttpRequestParser.TryParse.
type HttpRequestMetadata struct {
	BasicMetadata
	Method        string            `json:"method"`
	URL           string            `json:"url"`
	Protocol      string            `json:"protocol"`
	Host          string            `json:"host"`
	Headers       map[string]string `json:"headers"`
	Cookies       map[string]string `json:"cookies" binding:"omitempty"`
	ContentLength int64             `json:"content_length"`
	FormData      map[string]string `json:"form_data" binding:"omitempty"`
	Body          string            `json:"body" binding:"omitempty"`
	Trailer       map[string]string `json:"trailer" binding:"omitempty"`
	Reproducers   HttpRequestMetadataReproducers `json:"reproducers"`
}
+
// HttpRequestMetadataReproducers carries generated code snippets that
// replay the captured request with common tools.
type HttpRequestMetadataReproducers struct {
	CurlCommand  string `json:"curl_command"`
	RequestsCode string `json:"requests_code"`
	FetchRequest string `json:"fetch_request"`
}
+
// HttpRequestParser recognizes raw HTTP requests; see TryParse.
// It is stateless, so the zero value is ready to use.
type HttpRequestParser struct {
}
+
+func (p HttpRequestParser) TryParse(content []byte) Metadata {
+ reader := bufio.NewReader(bytes.NewReader(content))
+ request, err := http.ReadRequest(reader)
+ if err != nil {
+ return nil
+ }
+ var body string
+ if request.Body != nil {
+ if buffer, err := ioutil.ReadAll(request.Body); err == nil {
+ body = string(buffer)
+ }
+ _ = request.Body.Close()
+ }
+ _ = request.ParseForm()
+
+ return HttpRequestMetadata{
+ BasicMetadata: BasicMetadata{"http-request"},
+ Method: request.Method,
+ URL: request.URL.String(),
+ Protocol: request.Proto,
+ Host: request.Host,
+ Headers: JoinArrayMap(request.Header),
+ Cookies: CookiesMap(request.Cookies()),
+ ContentLength: request.ContentLength,
+ FormData: JoinArrayMap(request.Form),
+ Body: body,
+ Trailer: JoinArrayMap(request.Trailer),
+ Reproducers: HttpRequestMetadataReproducers{
+ CurlCommand: curlCommand(request),
+ RequestsCode: requestsCode(request),
+ FetchRequest: fetchRequest(request, body),
+ },
+ }
+}
+
+func curlCommand(request *http.Request) string {
+ if command, err := http2curl.GetCurlCommand(request); err == nil {
+ return command.String()
+ } else {
+ return "invalid-request"
+ }
+}
+
+func requestsCode(request *http.Request) string {
+ var b strings.Builder
+ var params string
+ if request.Form != nil {
+ params = toJson(JoinArrayMap(request.PostForm))
+ }
+ headers := toJson(JoinArrayMap(request.Header))
+ cookies := toJson(CookiesMap(request.Cookies()))
+
+ b.WriteString("import requests\n\nresponse = requests." + strings.ToLower(request.Method) + "(")
+ b.WriteString("\"" + request.URL.String() + "\"")
+ if params != "" {
+ b.WriteString(", data = " + params)
+ }
+ if headers != "" {
+ b.WriteString(", headers = " + headers)
+ }
+ if cookies != "" {
+ b.WriteString(", cookies = " + cookies)
+ }
+ b.WriteString(")\n")
+ b.WriteString(`
+# print(response.url)
+# print(response.text)
+# print(response.content)
+# print(response.json())
+# print(response.raw)
+# print(response.status_code)
+# print(response.cookies)
+# print(response.history)
+`)
+
+ return b.String()
+}
+
+func fetchRequest(request *http.Request, body string) string {
+ headers := JoinArrayMap(request.Header)
+ data := make(map[string]interface{})
+ data["headers"] = headers
+ if referrer := request.Header.Get("referrer"); referrer != "" {
+ data["Referrer"] = referrer
+ }
+ // TODO: referrerPolicy
+ if body == "" {
+ data["body"] = nil
+ } else {
+ data["body"] = body
+ }
+ data["method"] = request.Method
+ // TODO: mode
+
+ if jsonData := toJson(data); jsonData != "" {
+ return "fetch(\"" + request.URL.String() + "\", " + jsonData + ");"
+ } else {
+ return "invalid-request"
+ }
+}
+
// toJson marshals obj to its JSON representation, or returns the empty
// string when obj cannot be marshaled.
func toJson(obj interface{}) string {
	buffer, err := json.Marshal(obj)
	if err != nil {
		return ""
	}
	return string(buffer)
}
diff --git a/parsers/http_response_parser.go b/parsers/http_response_parser.go
new file mode 100644
index 0000000..a639dec
--- /dev/null
+++ b/parsers/http_response_parser.go
@@ -0,0 +1,72 @@
+package parsers
+
+import (
+ "bufio"
+ "bytes"
+ "compress/gzip"
+ "io/ioutil"
+ "net/http"
+)
+
// HttpResponseMetadata describes a parsed HTTP response: status line,
// headers, cookies and body. Compressed reports whether the body was
// gzip-encoded on the wire (Body holds the decompressed text in that
// case). Produced by HttpResponseParser.TryParse.
type HttpResponseMetadata struct {
	BasicMetadata
	Status           string            `json:"status"`
	StatusCode       int               `json:"status_code"`
	Protocol         string            `json:"protocol"`
	Headers          map[string]string `json:"headers"`
	ConnectionClosed bool              `json:"connection_closed"`
	Cookies          map[string]string `json:"cookies" binding:"omitempty"`
	Location         string            `json:"location" binding:"omitempty"`
	Compressed       bool              `json:"compressed"`
	Body             string            `json:"body" binding:"omitempty"`
	Trailer          map[string]string `json:"trailer" binding:"omitempty"`
}
+
// HttpResponseParser recognizes raw HTTP responses; see TryParse.
// It is stateless, so the zero value is ready to use.
type HttpResponseParser struct {
}
+
+func (p HttpResponseParser) TryParse(content []byte) Metadata {
+ reader := bufio.NewReader(bytes.NewReader(content))
+ response, err := http.ReadResponse(reader, nil)
+ if err != nil {
+ return nil
+ }
+ var body string
+ var compressed bool
+ if response.Body != nil {
+ switch response.Header.Get("Content-Encoding") {
+ case "gzip":
+ if gzipReader, err := gzip.NewReader(response.Body); err == nil {
+ if buffer, err := ioutil.ReadAll(gzipReader); err == nil {
+ body = string(buffer)
+ compressed = true
+ }
+ _ = gzipReader.Close()
+ }
+ default:
+ if buffer, err := ioutil.ReadAll(response.Body); err == nil {
+ body = string(buffer)
+ }
+ }
+ _ = response.Body.Close()
+ }
+
+ var location string
+ if locationUrl, err := response.Location(); err == nil {
+ location = locationUrl.String()
+ }
+
+ return HttpResponseMetadata{
+ BasicMetadata: BasicMetadata{"http-response"},
+ Status: response.Status,
+ StatusCode: response.StatusCode,
+ Protocol: response.Proto,
+ Headers: JoinArrayMap(response.Header),
+ ConnectionClosed: response.Close,
+ Cookies: CookiesMap(response.Cookies()),
+ Location: location,
+ Compressed: compressed,
+ Body: body,
+ Trailer: JoinArrayMap(response.Trailer),
+ }
+}
diff --git a/parsers/parser.go b/parsers/parser.go
new file mode 100644
index 0000000..06cc0dc
--- /dev/null
+++ b/parsers/parser.go
@@ -0,0 +1,28 @@
+package parsers
+
// Parser is implemented by protocol-specific parsers. TryParse returns
// the extracted metadata, or nil when content is not in the parser's
// format.
type Parser interface {
	TryParse(content []byte) Metadata
}

// Metadata is a marker interface for the result of a successful parse;
// concrete implementations embed BasicMetadata.
type Metadata interface {
}
+
// BasicMetadata is embedded in every concrete metadata type and carries
// the discriminating type tag (e.g. "http-request", "http-response").
type BasicMetadata struct {
	Type string `json:"type"`
}
+
// Registered parsers, tried sequentially by Parse.
var parsers = []Parser{ // order matters: the first successful parser wins
	HttpRequestParser{},
	HttpResponseParser{},
}
+
+func Parse(content []byte) Metadata {
+ for _, parser := range parsers {
+ if metadata := parser.TryParse(content); metadata != nil {
+ return metadata
+ }
+ }
+
+ return nil
+}
diff --git a/parsers/parser_utils.go b/parsers/parser_utils.go
new file mode 100644
index 0000000..b688262
--- /dev/null
+++ b/parsers/parser_utils.go
@@ -0,0 +1,24 @@
+package parsers
+
+import (
+ "net/http"
+ "strings"
+)
+
// JoinArrayMap flattens a multi-valued map (such as http.Header) into a
// single-valued one, joining multiple values for the same key with ";".
func JoinArrayMap(obj map[string][]string) map[string]string {
	joined := make(map[string]string, len(obj))
	for key, values := range obj {
		joined[key] = strings.Join(values, ";")
	}

	return joined
}
+
+func CookiesMap(cookiesArray []*http.Cookie) map[string]string {
+ cookies := make(map[string]string, len(cookiesArray))
+ for _, cookie := range cookiesArray {
+ cookies[cookie.Name] = cookie.Value
+ }
+
+ return cookies
+}