author     Emiliano Ciavatta    2020-04-01 12:21:58 +0000
committer  Emiliano Ciavatta    2020-04-01 12:21:58 +0000
commit     2b9ccbe76c6975fe7cd09a3260ba459d59b9970a
tree       6eaf94a797261f2db0736d4abe999db219fcad7a
Initial commit
-rw-r--r--  .gitignore                   10
-rw-r--r--  Dockerfile                   10
-rw-r--r--  Dockerfile.env               42
-rw-r--r--  README.md                     9
-rw-r--r--  caronte.go                   24
-rw-r--r--  docker-compose.testing.yml   27
-rw-r--r--  go.mod                       10
-rw-r--r--  go.sum                      127
-rw-r--r--  pcap_importer.go            237
-rw-r--r--  storage.go                  137
-rw-r--r--  storage_test.go             126
-rw-r--r--  stream_factory.go            29
-rw-r--r--  stream_handler.go           206
-rw-r--r--  utils.go                     30
14 files changed, 1024 insertions(+), 0 deletions(-)
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..df15af9
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,10 @@
+# IntelliJ stuff
+.idea/
+
+# Pcaps
+*.pcap
+*.pcapng
+
+# Build file
+caronte
+
diff --git a/Dockerfile b/Dockerfile
new file mode 100644
index 0000000..2a1ffc8
--- /dev/null
+++ b/Dockerfile
@@ -0,0 +1,10 @@
+FROM eciavatta/caronte-env
+
+COPY . /caronte
+
+WORKDIR /caronte
+
+RUN go mod download && go build
+
+CMD ./caronte
+
diff --git a/Dockerfile.env b/Dockerfile.env
new file mode 100644
index 0000000..844be8e
--- /dev/null
+++ b/Dockerfile.env
@@ -0,0 +1,42 @@
+FROM ubuntu:18.04
+
+ENV RAGEL_VERSION 6.10
+ENV HYPERSCAN_VERSION 5.2.1
+ENV GO_VERSION 1.14.1
+ENV PATH /usr/local/go/bin:$PATH
+
+# Install tools and libraries
+RUN apt-get update && apt-get install -y git wget make cmake build-essential python3 libpcap-dev pkg-config
+
+# Get Boost source to compile Hyperscan
+RUN wget https://dl.bintray.com/boostorg/release/1.72.0/source/boost_1_72_0.tar.gz -P /tmp/ && \
+ tar -C /tmp/ -xzf /tmp/boost_1_72_0.tar.gz
+
+# Get Ragel source and install it
+RUN wget http://www.colm.net/files/ragel/ragel-$RAGEL_VERSION.tar.gz -P /tmp && \
+ tar -C /tmp/ -xzf /tmp/ragel-$RAGEL_VERSION.tar.gz && \
+ cd /tmp/ragel-$RAGEL_VERSION/ && \
+ ./configure && \
+ make && \
+ make install
+
+# Get Hyperscan source and install it
+RUN wget https://github.com/intel/hyperscan/archive/v$HYPERSCAN_VERSION.tar.gz -P /tmp && \
+ tar -C /tmp/ -xzf /tmp/v$HYPERSCAN_VERSION.tar.gz && \
+ cd /tmp/hyperscan-$HYPERSCAN_VERSION/ && \
+ mkdir build && \
+ cd build && \
+ cmake -G "Unix Makefiles" -DCMAKE_CXX_COMPILER=/usr/bin/g++ -DBUILD_STATIC_AND_SHARED=1 \
+ -DBOOST_ROOT=/tmp/boost_1_72_0 .. && \
+ make && \
+ make install
+
+# Get GoLang and install it
+RUN wget https://dl.google.com/go/go$GO_VERSION.linux-amd64.tar.gz -P /tmp && \
+ tar -C /usr/local -xzf /tmp/go$GO_VERSION.linux-amd64.tar.gz
+
+# Remove source files
+RUN rm -rf /tmp/*
+
+CMD /bin/bash
+
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..8180132
--- /dev/null
+++ b/README.md
@@ -0,0 +1,9 @@
+# [WIP] Caronte
+
+<img align="left" src="https://divinacommedia.weebly.com/uploads/5/5/2/3/5523249/1299707879.jpg">
+Caronte is a tool to analyze network flows during attack/defence capture the flag (CTF) events.
+It reassembles TCP packets captured in pcap files to rebuild TCP connections, and analyzes each connection to find user-defined patterns.
+Patterns can be defined as regular expressions or with protocol-specific rules.
+The connection flows are saved into a database and can be visualized with the web application; a REST API is also provided.
+
+Packets can be captured locally on the same machine or imported remotely. The streams of bytes extracted from the TCP payload of packets are processed by [Hyperscan](https://github.com/intel/hyperscan), a high-performance regular expression matching library. // TODO
\ No newline at end of file
diff --git a/caronte.go b/caronte.go
new file mode 100644
index 0000000..934828d
--- /dev/null
+++ b/caronte.go
@@ -0,0 +1,24 @@
+package main
+
+import (
+ "fmt"
+)
+
+func main() {
+ // testStorage()
+ storage := NewStorage("localhost", 27017, "testing")
+ err := storage.Connect(nil)
+ if err != nil {
+ panic(err)
+ }
+
+ importer := NewPcapImporter(&storage, "10.10.10.10")
+
+ sessionId, err := importer.ImportPcap("capture_00459_20190627165500.pcap")
+ if err != nil {
+ fmt.Println(err)
+ } else {
+ fmt.Println(sessionId)
+ }
+
+}
diff --git a/docker-compose.testing.yml b/docker-compose.testing.yml
new file mode 100644
index 0000000..c4eff8e
--- /dev/null
+++ b/docker-compose.testing.yml
@@ -0,0 +1,27 @@
+version: "3.7"
+services:
+
+ mongo:
+ image: mongo:4
+ networks:
+ - caronte-net
+ restart: always
+
+ caronte:
+ build:
+ context: .
+ dockerfile: Dockerfile
+ image: caronte
+ ports:
+ - "6666:6666"
+ depends_on:
+ - mongo
+ networks:
+ - caronte-net
+ command: "./caronte"
+ environment:
+ MONGO_HOST: mongo
+ MONGO_PORT: 27017
+
+networks:
+ caronte-net:
diff --git a/go.mod b/go.mod
new file mode 100644
index 0000000..5ea26e3
--- /dev/null
+++ b/go.mod
@@ -0,0 +1,10 @@
+module github.com/eciavatta/caronte
+
+go 1.14
+
+require (
+ github.com/flier/gohs v1.0.0
+ github.com/google/gopacket v1.1.17
+ go.mongodb.org/mongo-driver v1.3.1
+ golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3
+)
diff --git a/go.sum b/go.sum
new file mode 100644
index 0000000..4d0e509
--- /dev/null
+++ b/go.sum
@@ -0,0 +1,127 @@
+github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
+github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
+github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+github.com/flier/gohs v1.0.0 h1:Q0mmufGWTigzKb140WmJ0+k3EGAf335Qgv/pz5SOPvU=
+github.com/flier/gohs v1.0.0/go.mod h1:Jlg6A1xXSMhPorF74/LkYHkCHZ87Txi8CqIHHyIKgKg=
+github.com/go-stack/stack v1.8.0 h1:5SgMzNM5HxrEjV0ww2lTmX6E2Izsfxas4+YHWRs3Lsk=
+github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY=
+github.com/gobuffalo/attrs v0.0.0-20190224210810-a9411de4debd/go.mod h1:4duuawTqi2wkkpB4ePgWMaai6/Kc6WEz83bhFwpHzj0=
+github.com/gobuffalo/depgen v0.0.0-20190329151759-d478694a28d3/go.mod h1:3STtPUQYuzV0gBVOY3vy6CfMm/ljR4pABfrTeHNLHUY=
+github.com/gobuffalo/depgen v0.1.0/go.mod h1:+ifsuy7fhi15RWncXQQKjWS9JPkdah5sZvtHc2RXGlg=
+github.com/gobuffalo/envy v1.6.15/go.mod h1:n7DRkBerg/aorDM8kbduw5dN3oXGswK5liaSCx4T5NI=
+github.com/gobuffalo/envy v1.7.0/go.mod h1:n7DRkBerg/aorDM8kbduw5dN3oXGswK5liaSCx4T5NI=
+github.com/gobuffalo/flect v0.1.0/go.mod h1:d2ehjJqGOH/Kjqcoz+F7jHTBbmDb38yXA598Hb50EGs=
+github.com/gobuffalo/flect v0.1.1/go.mod h1:8JCgGVbRjJhVgD6399mQr4fx5rRfGKVzFjbj6RE/9UI=
+github.com/gobuffalo/flect v0.1.3/go.mod h1:8JCgGVbRjJhVgD6399mQr4fx5rRfGKVzFjbj6RE/9UI=
+github.com/gobuffalo/genny v0.0.0-20190329151137-27723ad26ef9/go.mod h1:rWs4Z12d1Zbf19rlsn0nurr75KqhYp52EAGGxTbBhNk=
+github.com/gobuffalo/genny v0.0.0-20190403191548-3ca520ef0d9e/go.mod h1:80lIj3kVJWwOrXWWMRzzdhW3DsrdjILVil/SFKBzF28=
+github.com/gobuffalo/genny v0.1.0/go.mod h1:XidbUqzak3lHdS//TPu2OgiFB+51Ur5f7CSnXZ/JDvo=
+github.com/gobuffalo/genny v0.1.1/go.mod h1:5TExbEyY48pfunL4QSXxlDOmdsD44RRq4mVZ0Ex28Xk=
+github.com/gobuffalo/gitgen v0.0.0-20190315122116-cc086187d211/go.mod h1:vEHJk/E9DmhejeLeNt7UVvlSGv3ziL+djtTr3yyzcOw=
+github.com/gobuffalo/gogen v0.0.0-20190315121717-8f38393713f5/go.mod h1:V9QVDIxsgKNZs6L2IYiGR8datgMhB577vzTDqypH360=
+github.com/gobuffalo/gogen v0.1.0/go.mod h1:8NTelM5qd8RZ15VjQTFkAW6qOMx5wBbW4dSCS3BY8gg=
+github.com/gobuffalo/gogen v0.1.1/go.mod h1:y8iBtmHmGc4qa3urIyo1shvOD8JftTtfcKi+71xfDNE=
+github.com/gobuffalo/logger v0.0.0-20190315122211-86e12af44bc2/go.mod h1:QdxcLw541hSGtBnhUc4gaNIXRjiDppFGaDqzbrBd3v8=
+github.com/gobuffalo/mapi v1.0.1/go.mod h1:4VAGh89y6rVOvm5A8fKFxYG+wIW6LO1FMTG9hnKStFc=
+github.com/gobuffalo/mapi v1.0.2/go.mod h1:4VAGh89y6rVOvm5A8fKFxYG+wIW6LO1FMTG9hnKStFc=
+github.com/gobuffalo/packd v0.0.0-20190315124812-a385830c7fc0/go.mod h1:M2Juc+hhDXf/PnmBANFCqx4DM3wRbgDvnVWeG2RIxq4=
+github.com/gobuffalo/packd v0.1.0/go.mod h1:M2Juc+hhDXf/PnmBANFCqx4DM3wRbgDvnVWeG2RIxq4=
+github.com/gobuffalo/packr/v2 v2.0.9/go.mod h1:emmyGweYTm6Kdper+iywB6YK5YzuKchGtJQZ0Odn4pQ=
+github.com/gobuffalo/packr/v2 v2.2.0/go.mod h1:CaAwI0GPIAv+5wKLtv8Afwl+Cm78K/I/VCm/3ptBN+0=
+github.com/gobuffalo/syncx v0.0.0-20190224160051-33c29581e754/go.mod h1:HhnNqWY95UYwwW3uSASeV7vtgYkT2t16hJgV3AEPUpw=
+github.com/golang/snappy v0.0.1 h1:Qgr9rKW7uDUkrbSmQeiDsGa8SjGyCOGtuasMWwvp2P4=
+github.com/golang/snappy v0.0.1/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
+github.com/google/go-cmp v0.2.0 h1:+dTQ8DZQJz0Mb/HjFlkptS1FeQ4cWSnN941F8aEG4SQ=
+github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M=
+github.com/google/gopacket v1.1.14/go.mod h1:UCLx9mCmAwsVbn6qQl1WIEt2SO7Nd2fD0th1TBAsqBw=
+github.com/google/gopacket v1.1.17 h1:rMrlX2ZY2UbvT+sdz3+6J+pp2z+msCq9MxTU6ymxbBY=
+github.com/google/gopacket v1.1.17/go.mod h1:UdDNZ1OO62aGYVnPhxT1U6aI7ukYtA/kB8vaU0diBUM=
+github.com/gopherjs/gopherjs v0.0.0-20180825215210-0210a2f0f73c h1:16eHWuMGvCjSfgRJKqIzapE78onvvTbdi1rMkU00lZw=
+github.com/gopherjs/gopherjs v0.0.0-20180825215210-0210a2f0f73c/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY=
+github.com/hashicorp/errwrap v1.0.0 h1:hLrqtEDnRye3+sgx6z4qVLNuviH3MR5aQ0ykNJa/UYA=
+github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4=
+github.com/hashicorp/go-multierror v1.0.0 h1:iVjPR7a6H0tWELX5NxNe7bYopibicUzc7uPribsnS6o=
+github.com/hashicorp/go-multierror v1.0.0/go.mod h1:dHtQlpGsu+cZNNAkkCN/P3hoUDHhCYQXV3UM06sGGrk=
+github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8=
+github.com/joho/godotenv v1.3.0/go.mod h1:7hK45KPybAkOC6peb+G5yklZfMxEjkZhHbwpqxOKXbg=
+github.com/jtolds/gls v4.2.1+incompatible h1:fSuqC+Gmlu6l/ZYAoZzx2pyucC8Xza35fpRVWLVmUEE=
+github.com/jtolds/gls v4.2.1+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU=
+github.com/karrick/godirwalk v1.8.0/go.mod h1:H5KPZjojv4lE+QYImBI8xVtrBRgYrIVsaRPx4tDPEn4=
+github.com/karrick/godirwalk v1.10.3/go.mod h1:RoGL9dQei4vP9ilrpETWE8CLOZ1kiN0LhBygSwrAsHA=
+github.com/kisielk/errcheck v1.2.0/go.mod h1:/BMXB+zMLi60iA8Vv6Ksmxu/1UDYcXs4uQLJ+jE2L00=
+github.com/klauspost/compress v1.9.5 h1:U+CaK85mrNNb4k8BNOfgJtJ/gr6kswUCFj6miSzVC6M=
+github.com/klauspost/compress v1.9.5/go.mod h1:RyIbtBH6LamlWaDj8nUwkbUhJ87Yi3uG0guNDohfE1A=
+github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
+github.com/konsorten/go-windows-terminal-sequences v1.0.2/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
+github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
+github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
+github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
+github.com/markbates/oncer v0.0.0-20181203154359-bf2de49a0be2/go.mod h1:Ld9puTsIW75CHf65OeIOkyKbteujpZVXDpWK6YGZbxE=
+github.com/markbates/safe v1.0.1/go.mod h1:nAqgmRi7cY2nqMc92/bSEeQA+R4OheNU2T1kNSCBdG0=
+github.com/montanaflynn/stats v0.0.0-20171201202039-1bf9dbcd8cbe/go.mod h1:wL8QJuTMNUDYhXwkmfOly8iTdp5TEcJFWZD2D7SIkUc=
+github.com/pelletier/go-toml v1.4.0/go.mod h1:PN7xzY2wHTK0K9p34ErDQMlFxa51Fk0OUruD3k1mMwo=
+github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
+github.com/pkg/errors v0.8.1 h1:iURUrRGxPUNPdy5/HRSm+Yj6okJ6UtLINN0Q9M4+h3I=
+github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
+github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
+github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
+github.com/rogpeppe/go-internal v1.1.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4=
+github.com/rogpeppe/go-internal v1.2.2/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4=
+github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4=
+github.com/sirupsen/logrus v1.4.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo=
+github.com/sirupsen/logrus v1.4.1/go.mod h1:ni0Sbl8bgC9z8RoU9G6nDWqqs/fq4eDPysMBDgk/93Q=
+github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE=
+github.com/smartystreets/assertions v0.0.0-20180820201707-7c9eb446e3cf h1:6V1qxN6Usn4jy8unvggSJz/NC790tefw8Zdy6OZS5co=
+github.com/smartystreets/assertions v0.0.0-20180820201707-7c9eb446e3cf/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc=
+github.com/smartystreets/goconvey v0.0.0-20180222194500-ef6db91d284a h1:JSvGDIbmil4Ui/dDdFBExb7/cmkNjyX5F97oglmvCDo=
+github.com/smartystreets/goconvey v0.0.0-20180222194500-ef6db91d284a/go.mod h1:XDJAKZRPZ1CvBcN2aX5YOUTYGHki24fSF0Iv48Ibg0s=
+github.com/spf13/cobra v0.0.3/go.mod h1:1l0Ry5zgKvJasoi3XT1TypsSe7PqH0Sj9dhYf7v3XqQ=
+github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4=
+github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
+github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
+github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
+github.com/stretchr/testify v1.3.0 h1:TivCn/peBQ7UY8ooIcPgZFpTNSz0Q2U6UrFlUfqbe0Q=
+github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
+github.com/tidwall/pretty v1.0.0 h1:HsD+QiTn7sK6flMKIvNmpqz1qrpP3Ps6jOKIKMooyg4=
+github.com/tidwall/pretty v1.0.0/go.mod h1:XNkn88O1ChpSDQmQeStsy+sBenx6DDtFZJxhVysOjyk=
+github.com/xdg/scram v0.0.0-20180814205039-7eeb5667e42c h1:u40Z8hqBAAQyv+vATcGgV0YCnDjqSL7/q/JyPhhJSPk=
+github.com/xdg/scram v0.0.0-20180814205039-7eeb5667e42c/go.mod h1:lB8K/P019DLNhemzwFU4jHLhdvlE6uDZjXFejJXr49I=
+github.com/xdg/stringprep v0.0.0-20180714160509-73f8eece6fdc h1:n+nNi93yXLkJvKwXNP9d55HC7lGK4H/SRcwB5IaUZLo=
+github.com/xdg/stringprep v0.0.0-20180714160509-73f8eece6fdc/go.mod h1:Jhud4/sHMO4oL310DaZAKk9ZaJ08SJfe+sJh0HrGL1Y=
+go.mongodb.org/mongo-driver v1.3.1 h1:op56IfTQiaY2679w922KVWa3qcHdml2K/Io8ayAOUEQ=
+go.mongodb.org/mongo-driver v1.3.1/go.mod h1:MSWZXKOynuguX+JSvwP8i+58jYCXxbia8HS3gZBapIE=
+golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
+golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
+golang.org/x/crypto v0.0.0-20190422162423-af44ce270edf/go.mod h1:WFFai1msRO1wXaEeE5yQxYXgSfI8pQAWXbQop6sCtWE=
+golang.org/x/crypto v0.0.0-20190530122614-20be4c3c3ed5 h1:8dUaAV7K4uHsF56JQWkprecIQKdPHtR9jCHF5nB8uzc=
+golang.org/x/crypto v0.0.0-20190530122614-20be4c3c3ed5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
+golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
+golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3 h1:0GoQqolDA55aaLxZyTzK/Y2ePZzZTUrRacwib7cNsYQ=
+golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
+golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.0.0-20190412183630-56d357773e84/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.0.0-20190423024810-112230192c58 h1:8gQV6CLnAEikrhgkHFbMAEhagSSnXWGV915qUMm9mrU=
+golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
+golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
+golang.org/x/sys v0.0.0-20190403152447-81d4e9dc473e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20190405154228-4b34438f7a67/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20190419153524-e8e3143a4f4a/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20190531175056-4c3a928424d2 h1:T5DasATyLQfmbTpfEXx/IOL9vfjzW6up+ZDkmHvIf2s=
+golang.org/x/sys v0.0.0-20190531175056-4c3a928424d2/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
+golang.org/x/text v0.3.2 h1:tW2bmiBqwgJj/UpqtC8EpXEZVYOwU0yG4iWbprSVAcs=
+golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
+golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
+golang.org/x/tools v0.0.0-20181030221726-6c7e314b6563/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
+golang.org/x/tools v0.0.0-20190329151228-23e29df326fe/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
+golang.org/x/tools v0.0.0-20190416151739-9c9e1878f421/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
+golang.org/x/tools v0.0.0-20190420181800-aa740d480789/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
+golang.org/x/tools v0.0.0-20190531172133-b3315ee88b7d/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
+gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
+gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
+gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI=
+gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
diff --git a/pcap_importer.go b/pcap_importer.go
new file mode 100644
index 0000000..9428b29
--- /dev/null
+++ b/pcap_importer.go
@@ -0,0 +1,237 @@
+package main
+
+import (
+ "context"
+ "errors"
+ "github.com/google/gopacket"
+ "github.com/google/gopacket/layers"
+ "github.com/google/gopacket/pcap"
+ "github.com/google/gopacket/tcpassembly"
+ "go.mongodb.org/mongo-driver/mongo"
+ "log"
+ "net"
+ "sync"
+ "time"
+)
+
+const initialAssemblerPoolSize = 16
+const flushOlderThan = 5 * time.Minute
+const invalidSessionId = "invalid_id"
+const importUpdateProgressInterval = 3 * time.Second
+const initialPacketPerServicesMapSize = 16
+const importedPcapsCollectionName = "imported_pcaps"
+
+
+type PcapImporter struct {
+ storage *Storage
+ streamPool *tcpassembly.StreamPool
+ assemblers []*tcpassembly.Assembler
+ sessions map[string]context.CancelFunc
+ mAssemblers sync.Mutex
+ mSessions sync.Mutex
+ serverIp gopacket.Endpoint
+}
+
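+// flowCount counts packets per service port: index 0 holds packets sent
+// towards the server (pi.serverIp), index 1 packets sent back by it.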
+type flowCount [2]int
+
+
+func NewPcapImporter(storage *Storage, serverIp string) *PcapImporter {
+ streamFactory := &BiDirectionalStreamFactory{
+ storage: storage,
+ serverIp: serverIp,
+ }
+ streamPool := tcpassembly.NewStreamPool(streamFactory)
+
+ return &PcapImporter{
+ storage: storage,
+ streamPool: streamPool,
+ assemblers: make([]*tcpassembly.Assembler, 0, initialAssemblerPoolSize),
+ sessions: make(map[string]context.CancelFunc),
+ mAssemblers: sync.Mutex{},
+ mSessions: sync.Mutex{},
+ serverIp: layers.NewIPEndpoint(net.ParseIP(serverIp)),
+ }
+}
+
+// ImportPcap imports a pcap file into the database. The pcap file must be present at the fileName path. If the
+// pcap is already being imported or has already been imported in the past, the function returns an error.
+// Otherwise it creates a new session, starts importing the pcap in the background, and returns immediately the
+// session name (the sha256 digest of the pcap file).
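+//
+// A minimal usage sketch (the pcap path below is illustrative):
+//
+//	importer := NewPcapImporter(&storage, "10.10.10.10")
+//	sessionId, err := importer.ImportPcap("dump.pcap")
+//	if err != nil {
+//		log.Println("import refused:", err)
+//	}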
+func (pi *PcapImporter) ImportPcap(fileName string) (string, error) {
+ hash, err := Sha256Sum(fileName)
+ if err != nil {
+ return invalidSessionId, err
+ }
+
+ pi.mSessions.Lock()
+ _, ok := pi.sessions[hash]
+ if ok {
+ pi.mSessions.Unlock()
+ return hash, errors.New("another equal session in progress")
+ }
+
+ doc := OrderedDocument{
+ {"_id", hash},
+ {"started_at", time.Now()},
+ {"completed_at", nil},
+ {"processed_packets", 0},
+ {"invalid_packets", 0},
+ {"packets_per_services", nil},
+ {"importing_error", err},
+ }
+ ctx, canc := context.WithCancel(context.Background())
+ _, err = pi.storage.InsertOne(ctx, importedPcapsCollectionName, doc)
+ if err != nil {
+ pi.mSessions.Unlock()
+ _, alreadyProcessed := err.(mongo.WriteException)
+ if alreadyProcessed {
+ return hash, errors.New("pcap already processed")
+ }
+ return hash, err
+ }
+ pi.sessions[hash] = canc
+ pi.mSessions.Unlock()
+
+ go pi.parsePcap(hash, fileName, ctx)
+
+ return hash, nil
+}
+
+func (pi *PcapImporter) CancelImport(sessionId string) error {
+ pi.mSessions.Lock()
+ defer pi.mSessions.Unlock()
+ cancel, ok := pi.sessions[sessionId]
+ if ok {
+ delete(pi.sessions, sessionId)
+ cancel()
+ return nil
+ } else {
+ return errors.New("session " + sessionId + " not found")
+ }
+}
+
+// parsePcap reads the pcap file and saves the reassembled TCP stream flows to the database.
+func (pi *PcapImporter) parsePcap(sessionId, fileName string, ctx context.Context) {
+ handle, err := pcap.OpenOffline(fileName)
+ if err != nil {
+ // TODO: update db and set error
+ return
+ }
+
+ packetSource := gopacket.NewPacketSource(handle, handle.LinkType())
+ packetSource.NoCopy = true
+ assembler := pi.takeAssembler()
+ packets := packetSource.Packets()
+ firstPacketTime := time.Time{}
+ updateProgressInterval := time.Tick(importUpdateProgressInterval)
+
+ processedPackets := 0
+ invalidPackets := 0
+ packetsPerService := make(map[int]*flowCount, initialPacketPerServicesMapSize)
+
+ progressUpdate := func(completed bool, err error) {
+ update := UnorderedDocument{
+ "processed_packets": processedPackets,
+ "invalid_packets": invalidPackets,
+ "packets_per_services": packetsPerService,
+ "importing_error": err,
+ }
+ if completed {
+ update["completed_at"] = time.Now()
+ }
+
+		_, _err := pi.storage.UpdateOne(nil, importedPcapsCollectionName, OrderedDocument{{"_id", sessionId}},
+			update, false)
+
+ if _err != nil {
+ log.Println("can't update importing statistics : ", _err)
+ }
+ }
+
+ deleteSession := func() {
+ pi.mSessions.Lock()
+ delete(pi.sessions, sessionId)
+ pi.mSessions.Unlock()
+ }
+
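+	// Main loop: the first, non-blocking select gives cancellation priority;
+	// the second waits for the next packet or for a progress-update tick.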
+ for {
+ select {
+ case <- ctx.Done():
+ handle.Close()
+ deleteSession()
+ progressUpdate(false, errors.New("import process cancelled"))
+ return
+ default:
+ }
+
+ select {
+ case packet := <-packets:
+ if packet == nil { // completed
+ if !firstPacketTime.IsZero() {
+ assembler.FlushOlderThan(firstPacketTime.Add(-flushOlderThan))
+ }
+ pi.releaseAssembler(assembler)
+ handle.Close()
+
+ deleteSession()
+ progressUpdate(true, nil)
+
+ return
+ }
+ processedPackets++
+
+ if packet.NetworkLayer() == nil || packet.TransportLayer() == nil ||
+ packet.TransportLayer().LayerType() != layers.LayerTypeTCP { // invalid packet
+ invalidPackets++
+ continue
+ }
+
+ timestamp := packet.Metadata().Timestamp
+ if firstPacketTime.IsZero() {
+ firstPacketTime = timestamp
+ }
+
+ tcp := packet.TransportLayer().(*layers.TCP)
+ var servicePort, index int
+		if packet.NetworkLayer().NetworkFlow().Dst() == pi.serverIp {
+			servicePort = int(tcp.DstPort)
+			index = 0
+		} else {
+			servicePort = int(tcp.SrcPort)
+			index = 1
+		}
+ fCount, ok := packetsPerService[servicePort]
+ if !ok {
+ fCount = &flowCount{0, 0}
+ packetsPerService[servicePort] = fCount
+ }
+ fCount[index]++
+
+ assembler.AssembleWithTimestamp(packet.NetworkLayer().NetworkFlow(), tcp, timestamp)
+ case <-updateProgressInterval:
+ progressUpdate(false, nil)
+ }
+ }
+}
+
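+// takeAssembler pops a reusable assembler from the pool, creating a new one
+// when the pool is empty; assemblers are returned with releaseAssembler.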
+func (pi *PcapImporter) takeAssembler() *tcpassembly.Assembler {
+ pi.mAssemblers.Lock()
+ defer pi.mAssemblers.Unlock()
+
+ if len(pi.assemblers) == 0 {
+ return tcpassembly.NewAssembler(pi.streamPool)
+ }
+
+ index := len(pi.assemblers) - 1
+ assembler := pi.assemblers[index]
+ pi.assemblers = pi.assemblers[:index]
+
+ return assembler
+}
+
+func (pi *PcapImporter) releaseAssembler(assembler *tcpassembly.Assembler) {
+ pi.mAssemblers.Lock()
+ pi.assemblers = append(pi.assemblers, assembler)
+ pi.mAssemblers.Unlock()
+}
diff --git a/storage.go b/storage.go
new file mode 100644
index 0000000..e8f6645
--- /dev/null
+++ b/storage.go
@@ -0,0 +1,137 @@
+package main
+
+import (
+ "context"
+ "errors"
+ "fmt"
+ "time"
+
+ "go.mongodb.org/mongo-driver/bson"
+ "go.mongodb.org/mongo-driver/mongo"
+ "go.mongodb.org/mongo-driver/mongo/options"
+)
+
+const defaultConnectionTimeout = 10 * time.Second
+const defaultOperationTimeout = 3 * time.Second
+
+type Storage struct {
+ client *mongo.Client
+ collections map[string]*mongo.Collection
+}
+
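+// OrderedDocument (bson.D) preserves field order, which MongoDB needs for
+// commands and compound keys; UnorderedDocument (bson.M) is a plain map for
+// documents where field order does not matter.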
+type OrderedDocument = bson.D
+type UnorderedDocument = bson.M
+
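+// NewStorage builds a Storage with its collection handles. It does not open
+// a connection: Connect must be called before any operation reaches MongoDB.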
+func NewStorage(host string, port int, database string) Storage {
+	opt := options.Client()
+	opt.ApplyURI(fmt.Sprintf("mongodb://%s:%v", host, port))
+ client, err := mongo.NewClient(opt)
+ if err != nil {
+ panic("Failed to create mongo client")
+ }
+
+ db := client.Database(database)
+ colls := map[string]*mongo.Collection{
+ "imported_pcaps": db.Collection("imported_pcaps"),
+ "connections": db.Collection("connections"),
+ }
+
+ return Storage{
+ client: client,
+ collections: colls,
+ }
+}
+
+func (storage *Storage) Connect(ctx context.Context) error {
+ if ctx == nil {
+ ctx, _ = context.WithTimeout(context.Background(), defaultConnectionTimeout)
+ }
+
+ return storage.client.Connect(ctx)
+}
+
+func (storage *Storage) InsertOne(ctx context.Context, collectionName string,
+ document interface{}) (interface{}, error) {
+
+ collection, ok := storage.collections[collectionName]
+ if !ok {
+ return nil, errors.New("invalid collection: " + collectionName)
+ }
+
+ if ctx == nil {
+ ctx, _ = context.WithTimeout(context.Background(), defaultOperationTimeout)
+ }
+
+ result, err := collection.InsertOne(ctx, document)
+ if err != nil {
+ return nil, err
+ }
+
+ return result.InsertedID, nil
+}
+
+func (storage *Storage) UpdateOne(ctx context.Context, collectionName string,
+	filter interface{}, update interface{}, upsert bool) (interface{}, error) {
+
+ collection, ok := storage.collections[collectionName]
+ if !ok {
+ return nil, errors.New("invalid collection: " + collectionName)
+ }
+
+ if ctx == nil {
+ ctx, _ = context.WithTimeout(context.Background(), defaultOperationTimeout)
+ }
+
+ opts := options.Update().SetUpsert(upsert)
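+	// wrap the caller's document in $set so that only the provided fields are
+	// modified instead of replacing the whole document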
+ update = bson.D{{"$set", update}}
+
+ result, err := collection.UpdateOne(ctx, filter, update, opts)
+ if err != nil {
+ return nil, err
+ }
+
+ if upsert {
+ return result.UpsertedID, nil
+ }
+
+ return result.ModifiedCount == 1, nil
+}
+
+func (storage *Storage) FindOne(ctx context.Context, collectionName string,
+ filter interface{}) (UnorderedDocument, error) {
+
+ collection, ok := storage.collections[collectionName]
+ if !ok {
+ return nil, errors.New("invalid collection: " + collectionName)
+ }
+
+ if ctx == nil {
+ ctx, _ = context.WithTimeout(context.Background(), defaultOperationTimeout)
+ }
+
+ var result bson.M
+ err := collection.FindOne(ctx, filter).Decode(&result)
+ if err != nil {
+ if err == mongo.ErrNoDocuments {
+ return nil, nil
+ }
+
+ return nil, err
+ }
+
+ return result, nil
+}
+
+
+func testStorage() {
+ storage := NewStorage("localhost", 27017, "testing")
+ _ = storage.Connect(nil)
+
+ id, err := storage.InsertOne(nil, "connections", bson.M{"_id": "provaaa"})
+ if err != nil {
+ panic(err)
+ } else {
+ fmt.Println(id)
+ }
+
+}
diff --git a/storage_test.go b/storage_test.go
new file mode 100644
index 0000000..5356596
--- /dev/null
+++ b/storage_test.go
@@ -0,0 +1,126 @@
+package main
+
+import (
+ "crypto/sha256"
+ "fmt"
+ "go.mongodb.org/mongo-driver/mongo"
+ "go.mongodb.org/mongo-driver/mongo/options"
+ "golang.org/x/net/context"
+ "os"
+ "testing"
+ "time"
+)
+
+const testCollection = "characters"
+
+var storage Storage
+var testContext context.Context
+
+func testInsert(t *testing.T) {
+	// insert a document into an invalid collection
+ insertedId, err := storage.InsertOne(testContext, "invalid_collection",
+ OrderedDocument{{"key", "invalid"}})
+ if insertedId != nil || err == nil {
+ t.Fatal("inserting documents in invalid collections must fail")
+ }
+
+ // insert ordered document
+ beatriceId, err := storage.InsertOne(testContext, testCollection,
+ OrderedDocument{{"name", "Beatrice"}, {"description", "blablabla"}})
+ if err != nil {
+ t.Fatal(err)
+ }
+ if beatriceId == nil {
+ t.Fatal("failed to insert an ordered document")
+ }
+
+ // insert unordered document
+ virgilioId, err := storage.InsertOne(testContext, testCollection,
+ UnorderedDocument{"name": "Virgilio", "description": "blablabla"})
+ if err != nil {
+ t.Fatal(err)
+ }
+ if virgilioId == nil {
+ t.Fatal("failed to insert an unordered document")
+ }
+
+ // insert document with custom id
+ danteId := "000000"
+ insertedId, err = storage.InsertOne(testContext, testCollection,
+ UnorderedDocument{"_id": danteId, "name": "Dante Alighieri", "description": "blablabla"})
+ if err != nil {
+ t.Fatal(err)
+ }
+ if insertedId != danteId {
+ t.Fatal("returned id doesn't match")
+ }
+
+ // insert duplicate document
+ insertedId, err = storage.InsertOne(testContext, testCollection,
+ UnorderedDocument{"_id": danteId, "name": "Dante Alighieri", "description": "blablabla"})
+ if insertedId != nil || err == nil {
+ t.Fatal("inserting duplicate id must fail")
+ }
+}
+
+func testFindOne(t *testing.T) {
+	// find a document in an invalid collection
+ result, err := storage.FindOne(testContext, "invalid_collection",
+ OrderedDocument{{"key", "invalid"}})
+ if result != nil || err == nil {
+		t.Fatal("finding a document in an invalid collection must fail")
+ }
+
+ // find an existing document
+ result, err = storage.FindOne(testContext, testCollection, OrderedDocument{{"_id", "000000"}})
+ if err != nil {
+ t.Fatal(err)
+ }
+ if result == nil {
+		t.Fatal("FindOne failed to find an existing document")
+ }
+ name, ok := result["name"]
+ if !ok || name != "Dante Alighieri" {
+ t.Fatal("document retrieved with FindOne is invalid")
+ }
+
+	// find a non-existing document
+ result, err = storage.FindOne(testContext, testCollection, OrderedDocument{{"_id", "invalid_id"}})
+ if err != nil {
+ t.Fatal(err)
+ }
+ if result != nil {
+		t.Fatal("FindOne must return nil for a non-existing document")
+ }
+}
+
+
+
+func TestBasicOperations(t *testing.T) {
+ t.Run("testInsert", testInsert)
+ t.Run("testFindOne", testFindOne)
+}
+
+func TestMain(m *testing.M) {
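+	// derive the database name from the current time so that repeated test
+	// runs do not collide on the same data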
+ uniqueDatabaseName := sha256.Sum256([]byte(time.Now().String()))
+ client, err := mongo.NewClient(options.Client().ApplyURI("mongodb://localhost:27017"))
+ if err != nil {
+ panic("failed to create mongo client")
+ }
+
+ db := client.Database(fmt.Sprintf("%x", uniqueDatabaseName[:31]))
+ storage = Storage{
+ client: client,
+ collections: map[string]*mongo.Collection{testCollection: db.Collection(testCollection)},
+ }
+
+ testContext, _ = context.WithTimeout(context.Background(), 10 * time.Second)
+
+ err = storage.Connect(nil)
+ if err != nil {
+ panic(err)
+ }
+
+ exitCode := m.Run()
+ os.Exit(exitCode)
+}
diff --git a/stream_factory.go b/stream_factory.go
new file mode 100644
index 0000000..e1d76a4
--- /dev/null
+++ b/stream_factory.go
@@ -0,0 +1,29 @@
+package main
+
+import (
+ "github.com/google/gopacket"
+ "github.com/google/gopacket/tcpassembly"
+)
+
+type BiDirectionalStreamFactory struct {
+ storage *Storage
+ serverIp string
+}
+
+// uniDirectionalStream handles one direction of a reassembled TCP connection.
+type uniDirectionalStream struct {
+ net, transport gopacket.Flow
+ r StreamHandler
+}
+
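+// New is invoked by the tcpassembly StreamPool for each new TCP flow; it
+// builds the StreamHandler that will receive the reassembled bytes.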
+func (h *BiDirectionalStreamFactory) New(net, transport gopacket.Flow) tcpassembly.Stream {
+ hstream := &uniDirectionalStream{
+ net: net,
+ transport: transport,
+ r: NewStreamHandler(),
+ }
+ // go hstream.run() // Important... we must guarantee that data from the tcpreader stream is read.
+
+ // StreamHandler implements tcpassembly.Stream, so we can return a pointer to it.
+ return &hstream.r
+}
diff --git a/stream_handler.go b/stream_handler.go
new file mode 100644
index 0000000..ad59856
--- /dev/null
+++ b/stream_handler.go
@@ -0,0 +1,206 @@
+// This file is adapted from gopacket's tcpreader package, which provides an implementation
+// of tcpassembly.Stream that presents the caller with an io.Reader for easy processing.
+//
+// The assembly package handles packet data reordering, but its output is
+// library-specific, thus not usable by the majority of external Go libraries.
+// The io.Reader interface, on the other hand, is used throughout much of Go
+// code as an easy mechanism for reading in data streams and decoding them. For
+// example, the net/http package provides the ReadRequest function, which can
+// parse an HTTP request from a live data stream, just what we'd want when
+// sniffing HTTP traffic. Using StreamHandler, this is relatively easy to set
+// up:
+//
+// // Create our StreamFactory
+// type httpStreamFactory struct {}
+// func (f *httpStreamFactory) New(a, b gopacket.Flow) {
+// r := tcpreader.NewReaderStream(false)
+// go printRequests(r)
+// return &r
+// }
+// func printRequests(r io.Reader) {
+// // Convert to bufio, since that's what ReadRequest wants.
+// buf := bufio.NewReader(r)
+// for {
+// if req, err := http.ReadRequest(buf); err == io.EOF {
+// return
+// } else if err != nil {
+// log.Println("Error parsing HTTP requests:", err)
+// } else {
+// fmt.Println("HTTP REQUEST:", req)
+// fmt.Println("Body contains", tcpreader.DiscardBytesToEOF(req.Body), "bytes")
+// }
+// }
+// }
+//
+// Using just this code, we're able to reference a powerful, built-in library
+// for HTTP request parsing to do all the dirty-work of parsing requests from
+// the wire in real-time. Pass this stream factory to a tcpassembly.StreamPool,
+// start up a tcpassembly.Assembler, and you're good to go!
+package main
+
+import (
+	"errors"
+	"github.com/google/gopacket/tcpassembly"
+	"io"
+)
+
+var discardBuffer = make([]byte, 4096)
+
+// DiscardBytesToFirstError will read in all bytes up to the first error
+// reported by the given reader, then return the number of bytes discarded
+// and the error encountered.
+func DiscardBytesToFirstError(r io.Reader) (discarded int, err error) {
+ for {
+ n, e := r.Read(discardBuffer)
+ discarded += n
+ if e != nil {
+ return discarded, e
+ }
+ }
+}
+
+// DiscardBytesToEOF will read in all bytes from a Reader until it
+// encounters an io.EOF, then return the number of bytes. Be careful
+// of this... if used on a Reader that returns a non-io.EOF error
+// consistently, this will loop forever discarding that error while
+// it waits for an EOF.
+func DiscardBytesToEOF(r io.Reader) (discarded int) {
+ for {
+ n, e := DiscardBytesToFirstError(r)
+ discarded += n
+ if e == io.EOF {
+ return
+ }
+ }
+}
+
+// StreamHandler implements both tcpassembly.Stream and io.Reader. You can use it
+// as a building block to make simple, easy stream handlers.
+//
+// IMPORTANT: If you use a StreamHandler, you MUST read ALL BYTES from it,
+// quickly. Not reading available bytes will block TCP stream reassembly. It's
+// a common pattern to do this by starting a goroutine in the factory's New
+// method:
+//
+// type myStreamHandler struct {
+// r StreamHandler
+// }
+// func (m *myStreamHandler) run() {
+// // Do something here that reads all of the StreamHandler, or your assembly
+// // will block.
+// fmt.Println(tcpreader.DiscardBytesToEOF(&m.r))
+// }
+// func (f *myStreamFactory) New(a, b gopacket.Flow) tcpassembly.Stream {
+// s := &myStreamHandler{}
+// go s.run()
+// // Return the StreamHandler as the stream that assembly should populate.
+// return &s.r
+// }
+type StreamHandler struct {
+ ReaderStreamOptions
+ reassembled chan []tcpassembly.Reassembly
+ done chan bool
+ current []tcpassembly.Reassembly
+ closed bool
+ lossReported bool
+ first bool
+ initiated bool
+}
+
+// ReaderStreamOptions provides user-resettable options for a StreamHandler.
+type ReaderStreamOptions struct {
+ // LossErrors determines whether this stream will return
+ // ReaderStreamDataLoss errors from its Read function whenever it
+ // determines data has been lost.
+ LossErrors bool
+}
+
+// NewStreamHandler returns a new StreamHandler object.
+func NewStreamHandler() StreamHandler {
+ r := StreamHandler{
+ reassembled: make(chan []tcpassembly.Reassembly),
+ done: make(chan bool),
+ first: true,
+ initiated: true,
+ }
+ return r
+}
+
+// Reassembled implements tcpassembly.Stream's Reassembled function.
+func (r *StreamHandler) Reassembled(reassembly []tcpassembly.Reassembly) {
+ if !r.initiated {
+ panic("StreamHandler not created via NewReaderStream")
+ }
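+	// hand the chunks to Read, then block on done until Read has consumed
+	// them, keeping the assembler and the reader in lock-step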
+ r.reassembled <- reassembly
+ <-r.done
+}
+
+// ReassemblyComplete implements tcpassembly.Stream's ReassemblyComplete function.
+func (r *StreamHandler) ReassemblyComplete() {
+ close(r.reassembled)
+ close(r.done)
+}
+
+// stripEmpty strips empty reassembly slices off the front of its current set of
+// slices.
+func (r *StreamHandler) stripEmpty() {
+ for len(r.current) > 0 && len(r.current[0].Bytes) == 0 {
+ r.current = r.current[1:]
+ r.lossReported = false
+ }
+}
+
+// DataLost is returned by the StreamHandler's Read function when it encounters
+// a Reassembly with Skip != 0.
+var DataLost = errors.New("lost data")
+
+// Read implements io.Reader's Read function.
+// Given a byte slice, it will either copy a non-zero number of bytes into
+// that slice and return the number of bytes and a nil error, or it will
+// leave slice p as is and return 0, io.EOF.
+func (r *StreamHandler) Read(p []byte) (int, error) {
+ if !r.initiated {
+ panic("StreamHandler not created via NewReaderStream")
+ }
+ var ok bool
+ r.stripEmpty()
+ for !r.closed && len(r.current) == 0 {
+ if r.first {
+ r.first = false
+ } else {
+ r.done <- true
+ }
+ if r.current, ok = <-r.reassembled; ok {
+ r.stripEmpty()
+ } else {
+ r.closed = true
+ }
+ }
+ if len(r.current) > 0 {
+ current := &r.current[0]
+ if r.LossErrors && !r.lossReported && current.Skip != 0 {
+ r.lossReported = true
+ return 0, DataLost
+ }
+ length := copy(p, current.Bytes)
+ current.Bytes = current.Bytes[length:]
+ return length, nil
+ }
+ return 0, io.EOF
+}
+
+// Close implements io.Closer's Close function, making StreamHandler an
+// io.ReadCloser. It discards all remaining bytes in the reassembly in a
+// manner that's safe for the assembler (i.e. it doesn't block).
+func (r *StreamHandler) Close() error {
+ r.current = nil
+ r.closed = true
+ for {
+ if _, ok := <-r.reassembled; !ok {
+ return nil
+ }
+ r.done <- true
+ }
+}
+
+
diff --git a/utils.go b/utils.go
new file mode 100644
index 0000000..cc99d93
--- /dev/null
+++ b/utils.go
@@ -0,0 +1,30 @@
+package main
+
+import (
+	"crypto/sha256"
+	"encoding/hex"
+ "io"
+ "log"
+ "os"
+)
+
+const invalidHashString = "invalid"
+
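+// Sha256Sum returns the hex-encoded SHA-256 digest of the file at fileName,
+// or invalidHashString and an error when the file cannot be read.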
+func Sha256Sum(fileName string) (string, error) {
+ f, err := os.Open(fileName)
+ if err != nil {
+ return invalidHashString, err
+ }
+ defer func() {
+ err = f.Close()
+ if err != nil {
+ log.Println("Cannot close file " + fileName)
+ }
+ }()
+
+ h := sha256.New()
+ if _, err := io.Copy(h, f); err != nil {
+ return invalidHashString, err
+ }
+
+	return hex.EncodeToString(h.Sum(nil)), nil
+}