-rw-r--r-- | connection_handler.go  | 12
-rw-r--r-- | pcap_importer.go       | 30
-rw-r--r-- | rules_manager.go       | 16
-rw-r--r-- | storage_test.go        | 24
-rw-r--r-- | stream_handler_test.go | 10
5 files changed, 46 insertions, 46 deletions
diff --git a/connection_handler.go b/connection_handler.go
index de7f508..de30634 100644
--- a/connection_handler.go
+++ b/connection_handler.go
@@ -15,7 +15,7 @@ const initialScannersCapacity = 1024

 type BiDirectionalStreamFactory struct {
     storage      Storage
-    serverIp     gopacket.Endpoint
+    serverIP     gopacket.Endpoint
     connections  map[StreamFlow]ConnectionHandler
     mConnections sync.Mutex
     rulesManager *RulesManager
@@ -50,7 +50,7 @@ func NewBiDirectionalStreamFactory(storage Storage, serverIP gopacket.Endpoint,

     factory := &BiDirectionalStreamFactory{
         storage:      storage,
-        serverIp:     serverIP,
+        serverIP:     serverIP,
         connections:  make(map[StreamFlow]ConnectionHandler, initialConnectionsCapacity),
         mConnections: sync.Mutex{},
         rulesManager: rulesManager,
@@ -136,7 +136,7 @@ func (factory *BiDirectionalStreamFactory) New(net, transport gopacket.Flow) tcp
         delete(factory.connections, invertedFlow)
     } else {
         var connectionFlow StreamFlow
-        if net.Src() == factory.serverIp {
+        if net.Src() == factory.serverIP {
             connectionFlow = invertedFlow
         } else {
             connectionFlow = flow
@@ -212,13 +212,13 @@ func (ch *connectionHandlerImpl) Complete(handler *StreamHandler) {
         return
     }

-    streamsIds := append(client.documentsIDs, server.documentsIDs...)
+    streamsIDs := append(client.documentsIDs, server.documentsIDs...)
     n, err := ch.Storage().Update(ConnectionStreams).
-        Filter(OrderedDocument{{"_id", UnorderedDocument{"$in": streamsIds}}}).
+        Filter(OrderedDocument{{"_id", UnorderedDocument{"$in": streamsIDs}}}).
         Many(UnorderedDocument{"connection_id": connectionID})
     if err != nil {
         log.WithError(err).WithField("connection", connection).Error("failed to update connection streams")
-    } else if int(n) != len(streamsIds) {
+    } else if int(n) != len(streamsIDs) {
         log.WithError(err).WithField("connection", connection).Error("failed to update all connections streams")
     }
 }
diff --git a/pcap_importer.go b/pcap_importer.go
index ac7c0b5..00c84bd 100644
--- a/pcap_importer.go
+++ b/pcap_importer.go
@@ -17,7 +17,7 @@ import (

 const initialAssemblerPoolSize = 16
 const flushOlderThan = 5 * time.Minute
-const invalidSessionId = "invalid_id"
+const invalidSessionID = "invalid_id"
 const importUpdateProgressInterval = 3 * time.Second
 const initialPacketPerServicesMapSize = 16
 const importedPcapsCollectionName = "imported_pcaps"
@@ -29,16 +29,16 @@ type PcapImporter struct {
     sessions     map[string]context.CancelFunc
     mAssemblers  sync.Mutex
     mSessions    sync.Mutex
-    serverIp     gopacket.Endpoint
+    serverIP     gopacket.Endpoint
 }

 type flowCount [2]int

-func NewPcapImporter(storage Storage, serverIp net.IP) *PcapImporter {
-    serverEndpoint := layers.NewIPEndpoint(serverIp)
+func NewPcapImporter(storage Storage, serverIP net.IP) *PcapImporter {
+    serverEndpoint := layers.NewIPEndpoint(serverIP)
     streamFactory := &BiDirectionalStreamFactory{
         storage:  storage,
-        serverIp: serverEndpoint,
+        serverIP: serverEndpoint,
     }
     streamPool := tcpassembly.NewStreamPool(streamFactory)

@@ -49,7 +49,7 @@ func NewPcapImporter(storage Storage, serverIp net.IP) *PcapImporter {
         sessions:     make(map[string]context.CancelFunc),
         mAssemblers:  sync.Mutex{},
         mSessions:    sync.Mutex{},
-        serverIp:     serverEndpoint,
+        serverIP:     serverEndpoint,
     }
 }

@@ -60,7 +60,7 @@ func NewPcapImporter(storage Storage, serverIp net.IP) *PcapImporter {
 func (pi *PcapImporter) ImportPcap(fileName string) (string, error) {
     hash, err := Sha256Sum(fileName)
     if err != nil {
-        return invalidSessionId, err
+        return invalidSessionID, err
     }

     pi.mSessions.Lock()
@@ -97,21 +97,21 @@ func (pi *PcapImporter) ImportPcap(fileName string) (string, error) {
     return hash, nil
 }

-func (pi *PcapImporter) CancelImport(sessionId string) error {
+func (pi *PcapImporter) CancelImport(sessionID string) error {
     pi.mSessions.Lock()
     defer pi.mSessions.Unlock()
-    cancel, ok := pi.sessions[sessionId]
+    cancel, ok := pi.sessions[sessionID]
     if ok {
-        delete(pi.sessions, sessionId)
+        delete(pi.sessions, sessionID)
         cancel()
         return nil
     } else {
-        return errors.New("session " + sessionId + " not found")
+        return errors.New("session " + sessionID + " not found")
     }
 }

 // Read the pcap and save the tcp stream flow to the database
-func (pi *PcapImporter) parsePcap(sessionId, fileName string, ctx context.Context) {
+func (pi *PcapImporter) parsePcap(sessionID, fileName string, ctx context.Context) {
     handle, err := pcap.OpenOffline(fileName)
     if err != nil {
         // TODO: update db and set error
@@ -141,7 +141,7 @@ func (pi *PcapImporter) parsePcap(sessionId, fileName string, ctx context.Contex
             }

             _, _err := pi.storage.Update(importedPcapsCollectionName).
-                Filter(OrderedDocument{{"_id", sessionId}}).
+                Filter(OrderedDocument{{"_id", sessionID}}).
                 One(nil)
             if _err != nil {
                 log.Println("can't update importing statistics : ", _err)
@@ -150,7 +150,7 @@ func (pi *PcapImporter) parsePcap(sessionId, fileName string, ctx context.Contex

     deleteSession := func() {
         pi.mSessions.Lock()
-        delete(pi.sessions, sessionId)
+        delete(pi.sessions, sessionID)
         pi.mSessions.Unlock()
     }

@@ -193,7 +193,7 @@ func (pi *PcapImporter) parsePcap(sessionId, fileName string, ctx context.Contex
             tcp := packet.TransportLayer().(*layers.TCP)

             var servicePort, index int
-            if packet.NetworkLayer().NetworkFlow().Dst() == pi.serverIp {
+            if packet.NetworkLayer().NetworkFlow().Dst() == pi.serverIP {
                 servicePort, _ = strconv.Atoi(tcp.DstPort.String())
                 index = 0
             } else {
diff --git a/rules_manager.go b/rules_manager.go
index e358fed..482188e 100644
--- a/rules_manager.go
+++ b/rules_manager.go
@@ -25,7 +25,7 @@ type Pattern struct {
     Flags           RegexFlags `json:"flags"`
     MinOccurrences  int        `json:"min_occurrences"`
     MaxOccurrences  int        `json:"max_occurrences"`
-    internalId      int
+    internalID      int
     compiledPattern *hyperscan.Pattern
 }

@@ -42,7 +42,7 @@ type Filter struct {
 }

 type Rule struct {
-    Id    RowID  `json:"-" bson:"_id,omitempty"`
+    ID    RowID  `json:"-" bson:"_id,omitempty"`
     Name  string `json:"name" binding:"required,min=3" bson:"name"`
     Color string `json:"color" binding:"required,hexcolor" bson:"color"`
     Notes string `json:"notes" bson:"notes,omitempty"`
@@ -90,13 +90,13 @@ func (rm RulesManager) LoadRules() error {
     }

     rm.ruleIndex = len(rules)
-    return rm.generateDatabase(rules[len(rules)-1].Id)
+    return rm.generateDatabase(rules[len(rules)-1].ID)
 }

 func (rm RulesManager) AddRule(context context.Context, rule Rule) (string, error) {
     rm.mPatterns.Lock()

-    rule.Id = rm.storage.NewCustomRowID(uint64(rm.ruleIndex), time.Now())
+    rule.ID = rm.storage.NewCustomRowID(uint64(rm.ruleIndex), time.Now())
     rule.Enabled = true

     if err := rm.validateAndAddRuleLocal(&rule); err != nil {
@@ -104,7 +104,7 @@ func (rm RulesManager) AddRule(context context.Context, rule Rule) (string, erro
         return "", err
     }

-    if err := rm.generateDatabase(rule.Id); err != nil {
+    if err := rm.generateDatabase(rule.ID); err != nil {
         rm.mPatterns.Unlock()
         log.WithError(err).WithField("rule", rule).Panic("failed to generate database")
     }
@@ -114,7 +114,7 @@ func (rm RulesManager) AddRule(context context.Context, rule Rule) (string, erro
         log.WithError(err).WithField("rule", rule).Panic("failed to insert rule on database")
     }

-    return rule.Id.Hex(), nil
+    return rule.ID.Hex(), nil
 }

 func (rm RulesManager) validateAndAddRuleLocal(rule *Rule) error {
@@ -133,7 +133,7 @@ func (rm RulesManager) validateAndAddRuleLocal(rule *Rule) error {
         if err != nil {
             return err
         }
-        pattern.internalId = len(rm.patterns) + len(newPatterns)
+        pattern.internalID = len(rm.patterns) + len(newPatterns)
         newPatterns[hash] = pattern
     }

@@ -141,7 +141,7 @@ func (rm RulesManager) validateAndAddRuleLocal(rule *Rule) error {
         rm.patterns[key] = value
     }

-    rm.rules[rule.Id.Hex()] = *rule
+    rm.rules[rule.ID.Hex()] = *rule
     rm.rulesByName[rule.Name] = *rule

     return nil
diff --git a/storage_test.go b/storage_test.go
index e34bdb3..4caa30d 100644
--- a/storage_test.go
+++ b/storage_test.go
@@ -8,7 +8,7 @@ import (
 )

 type a struct {
-    Id primitive.ObjectID `bson:"_id,omitempty"`
+    ID primitive.ObjectID `bson:"_id,omitempty"`
     A  string             `bson:"a,omitempty"`
     B  int                `bson:"b,omitempty"`
     C  time.Time          `bson:"c,omitempty"`
@@ -26,12 +26,12 @@ func TestOperationOnInvalidCollection(t *testing.T) {

     simpleDoc := UnorderedDocument{"key": "a", "value": 0}
     insertOp := wrapper.Storage.Insert("invalid_collection").Context(wrapper.Context)
-    insertedId, err := insertOp.One(simpleDoc)
-    assert.Nil(t, insertedId)
+    insertedID, err := insertOp.One(simpleDoc)
+    assert.Nil(t, insertedID)
     assert.Error(t, err)

-    insertedIds, err := insertOp.Many([]interface{}{simpleDoc})
-    assert.Nil(t, insertedIds)
+    insertedIDs, err := insertOp.Many([]interface{}{simpleDoc})
+    assert.Nil(t, insertedIDs)
     assert.Error(t, err)

     updateOp := wrapper.Storage.Update("invalid_collection").Context(wrapper.Context)
@@ -155,12 +155,12 @@ func TestSimpleUpdateOneUpdateMany(t *testing.T) {
     assert.Nil(t, err)
     assert.Equal(t, int64(2), updated)

-    var upsertId interface{}
-    isUpdated, err = updateOp.Upsert(&upsertId).Filter(OrderedDocument{{"key", "d"}}).
+    var upsertID interface{}
+    isUpdated, err = updateOp.Upsert(&upsertID).Filter(OrderedDocument{{"key", "d"}}).
         One(OrderedDocument{{"key", "d"}})
     assert.Nil(t, err)
     assert.False(t, isUpdated)
-    assert.NotNil(t, upsertId)
+    assert.NotNil(t, upsertID)

     var results []UnorderedDocument
     findOp := wrapper.Storage.Find(collectionName).Context(wrapper.Context)
@@ -198,7 +198,7 @@ func TestComplexInsertManyFindMany(t *testing.T) {
             },
         },
         a{
-            Id: oid1,
+            ID: oid1,
             A:  "test1",
             B:  1,
             C:  testTime,
@@ -218,9 +218,9 @@ func TestComplexInsertManyFindMany(t *testing.T) {
     assert.Nil(t, err)
     assert.Len(t, results, 3)
     doc0, doc1, doc2 := docs[0].(a), docs[1].(a), docs[2].(a)
-    assert.Equal(t, ids[0], results[0].Id)
-    assert.Equal(t, doc1.Id, results[1].Id)
-    assert.Equal(t, ids[2], results[2].Id)
+    assert.Equal(t, ids[0], results[0].ID)
+    assert.Equal(t, doc1.ID, results[1].ID)
+    assert.Equal(t, ids[2], results[2].ID)
     assert.Equal(t, doc0.A, results[0].A)
     assert.Equal(t, doc1.A, results[1].A)
     assert.Equal(t, doc2.A, results[2].A)
diff --git a/stream_handler_test.go b/stream_handler_test.go
index 254f8c3..0f610f3 100644
--- a/stream_handler_test.go
+++ b/stream_handler_test.go
@@ -13,8 +13,8 @@ import (
     "time"
 )

-const testSrcIp = "10.10.10.100"
-const testDstIp = "10.10.10.1"
+const testSrcIP = "10.10.10.100"
+const testDstIP = "10.10.10.1"
 const srcPort = 44444
 const dstPort = 8080

@@ -305,13 +305,13 @@ func createTestStreamHandler(wrapper *TestStorageWrapper, patterns hyperscan.Str
         patterns: patterns,
     }

-    srcIp := layers.NewIPEndpoint(net.ParseIP(testSrcIp))
-    dstIp := layers.NewIPEndpoint(net.ParseIP(testDstIp))
+    srcIP := layers.NewIPEndpoint(net.ParseIP(testSrcIP))
+    dstIP := layers.NewIPEndpoint(net.ParseIP(testDstIP))
     srcPort := layers.NewTCPPortEndpoint(srcPort)
     dstPort := layers.NewTCPPortEndpoint(dstPort)

     scanner := Scanner{scratch: scratch, version: ZeroRowID}
-    return NewStreamHandler(testConnectionHandler, StreamFlow{srcIp, dstIp, srcPort, dstPort}, scanner)
+    return NewStreamHandler(testConnectionHandler, StreamFlow{srcIP, dstIP, srcPort, dstPort}, scanner)
 }

 type testConnectionHandler struct {
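The diff is a pure rename pass: identifiers such as serverIp, sessionId, Id, and testSrcIp become serverIP, sessionID, ID, and testSrcIP, following the Go convention (enforced by golint/staticcheck) that initialisms stay fully capitalized; no behavior changes. A minimal standalone sketch of the convention, not taken from this repository, with hypothetical names:

package main

import (
    "fmt"
    "net"
)

// serverConfig is a hypothetical type illustrating the spelling this
// diff adopts: IP and ID are initialisms, so serverIP and sessionID
// are preferred over serverIp and sessionId.
type serverConfig struct {
    serverIP  net.IP // was: serverIp
    sessionID string // was: sessionId
}

func main() {
    cfg := serverConfig{
        serverIP:  net.ParseIP("10.10.10.1"),
        sessionID: "invalid_id",
    }
    fmt.Println(cfg.serverIP, cfg.sessionID)
}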