Commit

lint
mmetc committed Jun 5, 2024
1 parent 757e5d3 commit 59cae88
Showing 5 changed files with 48 additions and 26 deletions.
18 changes: 12 additions & 6 deletions pkg/acquisition/modules/kafka/kafka_test.go
@@ -71,9 +71,8 @@ group_id: crowdsec`,
},
}

- subLogger := log.WithFields(log.Fields{
- "type": "kafka",
- })
+ subLogger := log.WithField("type", "kafka")

for _, test := range tests {
k := KafkaSource{}
err := k.Configure([]byte(test.config), subLogger, configuration.METRICS_NONE)
@@ -82,7 +81,6 @@ group_id: crowdsec`,
}

func writeToKafka(w *kafka.Writer, logs []string) {

for idx, log := range logs {
err := w.WriteMessages(context.Background(), kafka.Message{
Key: []byte(strconv.Itoa(idx)),
@@ -106,7 +104,9 @@ func createTopic(topic string, broker string) {
if err != nil {
panic(err)
}

var controllerConn *kafka.Conn

controllerConn, err = kafka.Dial("tcp", net.JoinHostPort(controller.Host, strconv.Itoa(controller.Port)))
if err != nil {
panic(err)
@@ -131,6 +131,7 @@ func TestStreamingAcquisition(t *testing.T) {
if runtime.GOOS == "windows" {
t.Skip("Skipping test on windows")
}

tests := []struct {
name string
logs []string
@@ -166,6 +167,7 @@ func TestStreamingAcquisition(t *testing.T) {
ts := ts
t.Run(ts.name, func(t *testing.T) {
k := KafkaSource{}

err := k.Configure([]byte(`
source: kafka
brokers:
@@ -174,12 +176,14 @@ topic: crowdsecplaintext`), subLogger, configuration.METRICS_NONE)
if err != nil {
t.Fatalf("could not configure kafka source : %s", err)
}

tomb := tomb.Tomb{}
out := make(chan types.Event)
err = k.StreamingAcquisition(out, &tomb)
cstest.AssertErrorContains(t, err, ts.expectedErr)

actualLines := 0

go writeToKafka(w, ts.logs)
READLOOP:
for {
@@ -195,13 +199,13 @@ topic: crowdsecplaintext`), subLogger, configuration.METRICS_NONE)
tomb.Wait()
})
}

}

func TestStreamingAcquisitionWithSSL(t *testing.T) {
if runtime.GOOS == "windows" {
t.Skip("Skipping test on windows")
}

tests := []struct {
name string
logs []string
@@ -236,6 +240,7 @@ func TestStreamingAcquisitionWithSSL(t *testing.T) {
ts := ts
t.Run(ts.name, func(t *testing.T) {
k := KafkaSource{}

err := k.Configure([]byte(`
source: kafka
brokers:
@@ -250,12 +255,14 @@ tls:
if err != nil {
t.Fatalf("could not configure kafka source : %s", err)
}

tomb := tomb.Tomb{}
out := make(chan types.Event)
err = k.StreamingAcquisition(out, &tomb)
cstest.AssertErrorContains(t, err, ts.expectedErr)

actualLines := 0

go writeToKafka(w2, ts.logs)
READLOOP:
for {
@@ -271,5 +278,4 @@ tls:
tomb.Wait()
})
}

}
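
The first hunk above replaces a one-field log.WithFields(log.Fields{...}) call with log.WithField(...). Both return the same kind of logrus entry; the shorter form simply avoids building a Fields map when only one key is attached. A minimal standalone sketch of the two forms (illustrative only, not taken from the repository):

package main

import (
    log "github.com/sirupsen/logrus"
)

func main() {
    // Equivalent entries: WithFields takes a map, WithField a single key/value pair.
    a := log.WithFields(log.Fields{"type": "kafka"})
    b := log.WithField("type", "kafka")

    a.Info("configured datasource")
    b.Info("configured datasource")
}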
13 changes: 6 additions & 7 deletions pkg/exprhelpers/exprlib_test.go
@@ -2,7 +2,6 @@ package exprhelpers

import (
"context"
"fmt"
"os"
"testing"
"time"
@@ -22,9 +21,7 @@ import (
"github.com/crowdsecurity/crowdsec/pkg/types"
)

- var (
- TestFolder = "tests"
- )
+ const TestFolder = "tests"

func getDBClient(t *testing.T) *database.Client {
t.Helper()
@@ -78,21 +75,21 @@ func TestVisitor(t *testing.T) {
name: "debug : can't compile",
filter: "static_one.foo.toto == 'lol'",
result: false,
- err: fmt.Errorf("bad syntax"),
+ err: errors.New("bad syntax"),
env: map[string]interface{}{"static_one": map[string]string{"foo": "bar"}},
},
{
name: "debug : can't compile #2",
filter: "static_one.f!oo.to/to == 'lol'",
result: false,
- err: fmt.Errorf("bad syntax"),
+ err: errors.New("bad syntax"),
env: map[string]interface{}{"static_one": map[string]string{"foo": "bar"}},
},
{
name: "debug : can't compile #3",
filter: "",
result: false,
- err: fmt.Errorf("bad syntax"),
+ err: errors.New("bad syntax"),
env: map[string]interface{}{"static_one": map[string]string{"foo": "bar"}},
},
}
Expand Down Expand Up @@ -193,10 +190,12 @@ func TestDistanceHelper(t *testing.T) {
"lat2": test.lat2,
"lon2": test.lon2,
}

vm, err := expr.Compile(test.expr, GetExprOptions(env)...)
if err != nil {
t.Fatalf("pattern:%s val:%s NOK %s", test.lat1, test.lon1, err)
}

ret, err := expr.Run(vm, env)
if test.valid {
require.NoError(t, err)
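
Two patterns recur in the exprlib_test.go hunks: a static error message moves from fmt.Errorf to errors.New, and a single-entry var block becomes a const. A small sketch of both patterns, with invented names:

package main

import (
    "errors"
    "fmt"
)

// A fixed value that is never reassigned can be a const instead of a var block.
const testFolder = "tests"

func main() {
    // No format verbs, so errors.New is enough; fmt.Errorf would add nothing here.
    errBadSyntax := errors.New("bad syntax")

    // Once values are interpolated, fmt.Errorf is still the right call.
    err := fmt.Errorf("failed opening %s: %v", testFolder, errBadSyntax)

    fmt.Println(err)
}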
7 changes: 5 additions & 2 deletions pkg/exprhelpers/xml_test.go
@@ -58,14 +58,16 @@ func TestXMLGetAttributeValue(t *testing.T) {

for _, test := range tests {
result, _ := XMLGetAttributeValue(test.xmlString, test.path, test.attribute)

isOk := assert.Equal(t, test.expectResult, result)
if !isOk {
t.Fatalf("test '%s' failed", test.name)
}

log.Printf("test '%s' : OK", test.name)
}

}

func TestXMLGetNodeValue(t *testing.T) {
if err := Init(nil); err != nil {
t.Fatal(err)
Expand Down Expand Up @@ -105,11 +107,12 @@ func TestXMLGetNodeValue(t *testing.T) {

for _, test := range tests {
result, _ := XMLGetNodeValue(test.xmlString, test.path)

isOk := assert.Equal(t, test.expectResult, result)
if !isOk {
t.Fatalf("test '%s' failed", test.name)
}

log.Printf("test '%s' : OK", test.name)
}

}
28 changes: 21 additions & 7 deletions pkg/leakybucket/buckets_test.go
@@ -61,25 +61,31 @@ func TestBucket(t *testing.T) {
}
} else {
wg := new(sync.WaitGroup)

fds, err := os.ReadDir(testdata)
if err != nil {
t.Fatalf("Unable to read test directory : %s", err)
}

for _, fd := range fds {
if fd.Name() == "hub" {
continue
}

fname := filepath.Join(testdata, fd.Name())
log.Infof("Running test on %s", fname)
tomb.Go(func() error {
wg.Add(1)
defer wg.Done()

if err := testOneBucket(t, hub, fname, tomb); err != nil {
t.Fatalf("Test '%s' failed : %s", fname, err)
}

return nil
})
}

wg.Wait()
}
}
@@ -92,22 +98,22 @@ func watchTomb(tomb *tomb.Tomb) {
log.Warning("Tomb is dead")
break
}

time.Sleep(100 * time.Millisecond)
}
}

func testOneBucket(t *testing.T, hub *cwhub.Hub, dir string, tomb *tomb.Tomb) error {

var (
holders []BucketFactory

stagefiles []byte
stagecfg string
stages []parser.Stagefile
err error
- buckets *Buckets
)
- buckets = NewBuckets()

+ buckets := NewBuckets()

/*load the scenarios*/
stagecfg = dir + "/scenarios.yaml"
@@ -117,51 +123,59 @@ func testOneBucket(t *testing.T, hub *cwhub.Hub, dir string, tomb *tomb.Tomb) er

tmpl, err := template.New("test").Parse(string(stagefiles))
if err != nil {
return fmt.Errorf("failed to parse template %s : %s", stagefiles, err)
return fmt.Errorf("failed to parse template %s: %w", stagefiles, err)
}

var out bytes.Buffer

err = tmpl.Execute(&out, map[string]string{"TestDirectory": dir})
if err != nil {
panic(err)
}

if err := yaml.UnmarshalStrict(out.Bytes(), &stages); err != nil {
t.Fatalf("failed unmarshaling %s : %s", stagecfg, err)
}

files := []string{}
for _, x := range stages {
files = append(files, x.Filename)
}

cscfg := &csconfig.CrowdsecServiceCfg{}

holders, response, err := LoadBuckets(cscfg, hub, files, tomb, buckets, false)
if err != nil {
t.Fatalf("failed loading bucket : %s", err)
}

tomb.Go(func() error {
watchTomb(tomb)
return nil
})

if !testFile(t, filepath.Join(dir, "test.json"), filepath.Join(dir, "in-buckets_state.json"), holders, response, buckets) {
return fmt.Errorf("tests from %s failed", dir)
}

return nil
}

func testFile(t *testing.T, file string, bs string, holders []BucketFactory, response chan types.Event, buckets *Buckets) bool {

var results []types.Event
var dump bool

- //should we restore
+ // should we restore
if _, err := os.Stat(bs); err == nil {
dump = true

if err := LoadBucketsState(bs, buckets, holders); err != nil {
t.Fatalf("Failed to load bucket state : %s", err)
}
}

/* now we can load the test files */
- //process the yaml
+ // process the yaml
yamlFile, err := os.Open(file)
if err != nil {
t.Errorf("yamlFile.Get err #%v ", err)
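
This file and parsing_test.go below also move error formatting from %s to %w. Formatting an error with %s flattens it to text, while %w wraps it so errors.Is and errors.As can still reach the underlying value. A short sketch of the difference (the file name is invented, and the printed results assume it does not exist):

package main

import (
    "errors"
    "fmt"
    "io/fs"
    "os"
)

func main() {
    _, err := os.Open("does-not-exist.yaml")

    // %s keeps only the error text; the chain to the underlying error is lost.
    flat := fmt.Errorf("failed to parse template %s: %s", "does-not-exist.yaml", err)

    // %w wraps the error, so callers can still inspect the cause.
    wrapped := fmt.Errorf("failed to parse template %s: %w", "does-not-exist.yaml", err)

    fmt.Println(errors.Is(flat, fs.ErrNotExist))    // false
    fmt.Println(errors.Is(wrapped, fs.ErrNotExist)) // true
}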
8 changes: 4 additions & 4 deletions pkg/parser/parsing_test.go
@@ -100,24 +100,24 @@ func testOneParser(pctx *UnixParserCtx, ectx EnricherCtx, dir string, b *testing
parser_cfg_file := fmt.Sprintf("%s/parsers.yaml", dir)
cfg, err := os.ReadFile(parser_cfg_file)
if err != nil {
return fmt.Errorf("failed opening %s : %s", parser_cfg_file, err)
return fmt.Errorf("failed opening %s: %w", parser_cfg_file, err)
}
tmpl, err := template.New("test").Parse(string(cfg))
if err != nil {
return fmt.Errorf("failed to parse template %s : %s", cfg, err)
return fmt.Errorf("failed to parse template %s: %w", cfg, err)
}
var out bytes.Buffer
err = tmpl.Execute(&out, map[string]string{"TestDirectory": dir})
if err != nil {
panic(err)
}
if err = yaml.UnmarshalStrict(out.Bytes(), &parser_configs); err != nil {
return fmt.Errorf("failed unmarshaling %s : %s", parser_cfg_file, err)
return fmt.Errorf("failed unmarshaling %s: %w", parser_cfg_file, err)
}

pnodes, err = LoadStages(parser_configs, pctx, ectx)
if err != nil {
return fmt.Errorf("unable to load parser config : %s", err)
return fmt.Errorf("unable to load parser config: %w", err)
}

//TBD: Load post overflows
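
Both testOneBucket and testOneParser render their YAML fixtures through text/template before unmarshaling them, so the {{.TestDirectory}} placeholder resolves to the directory under test. A minimal sketch of that pattern (the template body and directory name are invented for illustration):

package main

import (
    "bytes"
    "fmt"
    "text/template"
)

func main() {
    // Stand-in for the scenarios.yaml / parsers.yaml content the tests read from disk.
    src := "- filename: {{.TestDirectory}}/scenarios.yaml\n"

    tmpl, err := template.New("test").Parse(src)
    if err != nil {
        panic(fmt.Errorf("failed to parse template: %w", err))
    }

    var out bytes.Buffer
    if err := tmpl.Execute(&out, map[string]string{"TestDirectory": "tests/simple-leaky"}); err != nil {
        panic(err)
    }

    fmt.Print(out.String()) // - filename: tests/simple-leaky/scenarios.yaml
}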
