diff --git a/CHANGELOG.next.asciidoc b/CHANGELOG.next.asciidoc index aae6cb734c4..9833cade980 100644 --- a/CHANGELOG.next.asciidoc +++ b/CHANGELOG.next.asciidoc @@ -388,6 +388,7 @@ field. You can revert this change by configuring tags for the module and omittin - Upgrade k8s.io/client-go and k8s keystore tests. {pull}18817[18817] - Add support for multiple sets of hints on autodiscover {pull}18883[18883] - Add a configurable delay between retries when an app metadata cannot be retrieved by `add_cloudfoundry_metadata`. {pull}19181[19181] +- Add data type conversion in `dissect` processor for converting string values to other basic data types. {pull}18683[18683] - Add the `ignore_failure` configuration option to the dissect processor. {pull}19464[19464] - Add the `overwrite_keys` configuration option to the dissect processor. {pull}19464[19464] - Add support to trim captured values in the dissect processor. {pull}19464[19464] diff --git a/libbeat/processors/dissect/config_test.go b/libbeat/processors/dissect/config_test.go index 5b08a15fe95..d645cfe5d1b 100644 --- a/libbeat/processors/dissect/config_test.go +++ b/libbeat/processors/dissect/config_test.go @@ -137,3 +137,51 @@ func TestConfig(t *testing.T) { assert.Equal(t, trimModeAll, cfg.TrimValues) }) } + +func TestConfigForDataType(t *testing.T) { + t.Run("valid data type", func(t *testing.T) { + c, err := common.NewConfigFrom(map[string]interface{}{ + "tokenizer": "%{value1|integer} %{value2|float} %{value3|boolean} %{value4|long} %{value5|double}", + "field": "message", + }) + if !assert.NoError(t, err) { + return + } + + cfg := config{} + err = c.Unpack(&cfg) + if !assert.NoError(t, err) { + return + } + }) + t.Run("invalid data type", func(t *testing.T) { + c, err := common.NewConfigFrom(map[string]interface{}{ + "tokenizer": "%{value1|int} %{value2|short} %{value3|char} %{value4|void} %{value5|unsigned} id=%{id|xyz} status=%{status|abc} msg=\"%{message}\"", + "field": "message", + }) + if !assert.NoError(t, err) { + return + } + + cfg := config{} + err = c.Unpack(&cfg) + if !assert.Error(t, err) { + return + } + }) + t.Run("missing data type", func(t *testing.T) { + c, err := common.NewConfigFrom(map[string]interface{}{ + "tokenizer": "%{value1|} %{value2|}", + "field": "message", + }) + if !assert.NoError(t, err) { + return + } + + cfg := config{} + err = c.Unpack(&cfg) + if !assert.Error(t, err) { + return + } + }) +} diff --git a/libbeat/processors/dissect/const.go b/libbeat/processors/dissect/const.go index aa0349cf82d..b34af702313 100644 --- a/libbeat/processors/dissect/const.go +++ b/libbeat/processors/dissect/const.go @@ -38,14 +38,18 @@ var ( indirectAppendPrefix = "&+" greedySuffix = "->" pointerFieldPrefix = "*" + dataTypeIndicator = "|" + dataTypeSeparator = "\\|" // Needed for regexp numberRE = "\\d{1,2}" + alphaRE = "[[:alpha:]]*" delimiterRE = regexp.MustCompile("(?s)(.*?)%\\{([^}]*?)}") suffixRE = regexp.MustCompile("(.+?)" + // group 1 for key name "(" + ordinalIndicator + "(" + numberRE + ")" + ")?" + // group 2, 3 for ordinal "(" + fixedLengthIndicator + "(" + numberRE + ")" + ")?" + // group 4, 5 for fixed length - "(" + greedySuffix + ")?$") // group 6 for greedy + "(" + greedySuffix + ")?" + // group 6 for greedy + "(" + dataTypeSeparator + "(" + alphaRE + ")?" 
+ ")?$") // group 7,8 for data type separator and data type defaultJoinString = " " @@ -55,4 +59,6 @@ var ( errMixedPrefixIndirectAppend = errors.New("mixed prefix `&+`") errMixedPrefixAppendIndirect = errors.New("mixed prefix `&+`") errEmptyKey = errors.New("empty key") + errInvalidDatatype = errors.New("invalid data type") + errMissingDatatype = errors.New("missing data type") ) diff --git a/libbeat/processors/dissect/dissect.go b/libbeat/processors/dissect/dissect.go index bc9b0c75867..fea5bfbb219 100644 --- a/libbeat/processors/dissect/dissect.go +++ b/libbeat/processors/dissect/dissect.go @@ -17,10 +17,20 @@ package dissect -import "fmt" +import ( + "fmt" + "net" + "strconv" + "strings" + + "github.com/elastic/beats/v7/libbeat/common" + + "github.com/pkg/errors" +) // Map represents the keys and their values extracted with the defined tokenizer. type Map = map[string]string +type MapConverted = map[string]interface{} // positions represents the start and end position of the keys found in the string. type positions []position @@ -67,6 +77,23 @@ func (d *Dissector) Dissect(s string) (Map, error) { return d.resolve(s, positions), nil } +func (d *Dissector) DissectConvert(s string) (MapConverted, error) { + if len(s) == 0 { + return nil, errEmpty + } + + positions, err := d.extract(s) + if err != nil { + return nil, err + } + + if len(positions) == 0 { + return nil, errParsingFailure + } + + return d.resolveConvert(s, positions), nil +} + // Raw returns the raw tokenizer used to generate the actual parser. func (d *Dissector) Raw() string { return d.raw @@ -167,6 +194,35 @@ func (d *Dissector) resolve(s string, p positions) Map { return m } +func (d *Dissector) resolveConvert(s string, p positions) MapConverted { + lookup := make(common.MapStr, len(p)) + m := make(Map, len(p)) + mc := make(MapConverted, len(p)) + for _, f := range d.parser.fields { + pos := p[f.ID()] + f.Apply(s[pos.start:pos.end], m) // using map[string]string to avoid another set of apply methods + if !f.IsSaveable() { + lookup[f.Key()] = s[pos.start:pos.end] + } else { + key := f.Key() + if k, ok := lookup[f.Key()]; ok { + key = k.(string) + } + v, _ := m[key] + if f.DataType() != "" { + mc[key] = convertData(f.DataType(), v) + } else { + mc[key] = v + } + } + } + + for _, f := range d.parser.referenceFields { + delete(mc, f.Key()) + } + return mc +} + // New creates a new Dissector from a tokenized string. func New(tokenizer string) (*Dissector, error) { p, err := newParser(tokenizer) @@ -180,3 +236,51 @@ func New(tokenizer string) (*Dissector, error) { return &Dissector{parser: p, raw: tokenizer}, nil } + +// strToInt is a helper to interpret a string as either base 10 or base 16. +func strToInt(s string, bitSize int) (int64, error) { + base := 10 + if strings.HasPrefix(s, "0x") || strings.HasPrefix(s, "0X") { + // strconv.ParseInt will accept the '0x' or '0X` prefix only when base is 0. 
+ base = 0 + } + return strconv.ParseInt(s, base, bitSize) +} + +func transformType(typ dataType, value string) (interface{}, error) { + value = strings.TrimRight(value, " ") + switch typ { + case String: + return value, nil + case Long: + return strToInt(value, 64) + case Integer: + i, err := strToInt(value, 32) + return int32(i), err + case Float: + f, err := strconv.ParseFloat(value, 32) + return float32(f), err + case Double: + d, err := strconv.ParseFloat(value, 64) + return float64(d), err + case Boolean: + return strconv.ParseBool(value) + case IP: + if net.ParseIP(value) != nil { + return value, nil + } + return "", errors.New("value is not a valid IP address") + default: + return value, nil + } +} + +func convertData(typ string, b string) interface{} { + if dt, ok := dataTypeNames[typ]; ok { + value, err := transformType(dt, b) + if err == nil { + return value + } + } + return b +} diff --git a/libbeat/processors/dissect/dissect_test.go b/libbeat/processors/dissect/dissect_test.go index c97e020dd16..3543d8c3821 100644 --- a/libbeat/processors/dissect/dissect_test.go +++ b/libbeat/processors/dissect/dissect_test.go @@ -33,6 +33,85 @@ func TestNoToken(t *testing.T) { assert.Equal(t, errInvalidTokenizer, err) } +func TestDissectConversion(t *testing.T) { + tests := []struct { + Name string + Tok string + Msg string + Expected map[string]interface{} + Fail bool + }{ + { + Name: "Convert 1 value", + Tok: "id=%{id|integer} msg=\"%{message}\"", + Msg: "id=7736 msg=\"Single value OK\"}", + Expected: map[string]interface{}{ + "id": int32(7736), + "message": "Single value OK", + }, + Fail: false, + }, + { + Name: "Convert multiple values", + Tok: "id=%{id|integer} status=%{status|integer} duration=%{duration|float} uptime=%{uptime|long} success=%{success|boolean} msg=\"%{message}\"", + Msg: "id=7736 status=202 duration=0.975 uptime=1588975628 success=true msg=\"Request accepted\"}", + Expected: map[string]interface{}{ + "id": int32(7736), + "status": int32(202), + "duration": float32(0.975), + "uptime": int64(1588975628), + "success": true, + "message": "Request accepted", + }, + Fail: false, + }, + { + Name: "Convert 1 indirect field value", + Tok: "%{?k1}=%{&k1|integer} msg=\"%{message}\"", + Msg: "id=8268 msg=\"Single value indirect field\"}", + Expected: map[string]interface{}{ + "id": int32(8268), + "message": "Single value indirect field", + }, + Fail: false, + }, + { + Name: "Greedy padding skip test ->", + Tok: "id=%{id->|integer} padding_removed=%{padding_removed->|boolean} length=%{length->|long} msg=\"%{message}\"", + Msg: "id=1945 padding_removed=true length=123456789 msg=\"Testing for padding\"}", + Expected: map[string]interface{}{ + "id": int32(1945), + "padding_removed": true, + "length": int64(123456789), + "message": "Testing for padding", + }, + Fail: false, + }, + } + + for _, test := range tests { + t.Run(test.Name, func(t *testing.T) { + d, err := New(test.Tok) + if !assert.NoError(t, err) { + return + } + + if test.Fail { + _, err := d.DissectConvert(test.Msg) + assert.Error(t, err) + return + } + + r, err := d.DissectConvert(test.Msg) + if !assert.NoError(t, err) { + return + } + + assert.Equal(t, test.Expected, r) + }) + } +} + func TestEmptyString(t *testing.T) { d, err := New("%{hello}") _, err = d.Dissect("") @@ -179,3 +258,110 @@ func BenchmarkDissect(b *testing.B) { } }) } + +func dissectConversion(tok, msg string, b *testing.B) { + d, err := New(tok) + assert.NoError(b, err) + + _, err = d.DissectConvert(msg) + assert.NoError(b, err) +} + +func 
benchmarkConversion(tok, msg string, b *testing.B) { + for n := 0; n < b.N; n++ { + dissectConversion(tok, msg, b) + } +} + +func BenchmarkDissectNoConversionOneValue(b *testing.B) { + b.ReportAllocs() + benchmarkConversion("id=%{id} msg=\"%{message}\"", "id=7736 msg=\"Single value OK\"}", b) +} + +func BenchmarkDissectWithConversionOneValue(b *testing.B) { + b.ReportAllocs() + benchmarkConversion("id=%{id|integer} msg=\"%{message}\"", "id=7736 msg=\"Single value OK\"}", b) +} + +func BenchmarkDissectNoConversionMultipleValues(b *testing.B) { + b.ReportAllocs() + benchmarkConversion("id=%{id} status=%{status} duration=%{duration} uptime=%{uptime} success=%{success} msg=\"%{message}\"", + "id=7736 status=202 duration=0.975 uptime=1588975628 success=true msg=\"Request accepted\"}", b) +} + +func BenchmarkDissectWithConversionMultipleValues(b *testing.B) { + b.ReportAllocs() + benchmarkConversion("id=%{id|integer} status=%{status|integer} duration=%{duration|float} uptime=%{uptime|long} success=%{success|boolean} msg=\"%{message}\"", + "id=7736 status=202 duration=0.975 uptime=1588975628 success=true msg=\"Request accepted\"}", b) +} + +func BenchmarkDissectComplexStackTraceDegradation(b *testing.B) { + message := `18-Apr-2018 06:53:20.411 INFO [http-nio-8080-exec-1] org.apache.coyote.http11.Http11Processor.service Error parsing HTTP request header + Note: further occurrences of HTTP header parsing errors will be logged at DEBUG level. + java.lang.IllegalArgumentException: Invalid character found in method name. HTTP method names must be tokens + at org.apache.coyote.http11.Http11InputBuffer.parseRequestLine(Http11InputBuffer.java:426) + at org.apache.coyote.http11.Http11Processor.service(Http11Processor.java:687) + at org.apache.coyote.AbstractProcessorLight.process(AbstractProcessorLight.java:66) + at org.apache.coyote.AbstractProtocol$ConnectionHandler.process(AbstractProtocol.java:790) + at org.apache.tomcat.util.net.NioEndpoint$SocketProcessor.doRun(NioEndpoint.java:1459) + at org.apache.tomcat.util.net.SocketProcessorBase.run(SocketProcessorBase.java:49) + at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) + at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) + at org.apache.tomcat.util.threads.TaskThread$WrappingRunnable.run(TaskThread.java:61) + at java.lang.Thread.run(Thread.java:748) MACHINE[hello]` + + tests := []struct { + Name string + Tok string + }{ + { + Name: "ComplexStackTrace-1", + Tok: "%{origin} %{message}", + }, + { + Name: "ComplexStackTrace-2", + Tok: "%{day} %{origin} %{message}", + }, + { + Name: "ComplexStackTrace-3", + Tok: "%{day}-%{month} %{origin} %{message}", + }, + { + Name: "ComplexStackTrace-4", + Tok: "%{day}-%{month}-%{year} %{origin} %{message}", + }, + { + Name: "ComplexStackTrace-5", + Tok: "%{day}-%{month}-%{year} %{hour} %{origin} %{message}", + }, + { + Name: "ComplexStackTrace-6", + Tok: "%{day}-%{month}-%{year} %{hour} %{severity} %{origin} %{message}", + }, + { + Name: "ComplexStackTrace-7", + Tok: "%{day}-%{month}-%{year} %{hour} %{severity} [%{thread_id}] %{origin} %{message}", + }, + { + Name: "ComplexStackTrace-8", + Tok: "%{day}-%{month}-%{year} %{hour} %{severity} [%{thread_id}] %{origin} %{first_line} %{message}", + }, + } + + for _, test := range tests { + b.Run(test.Name, func(b *testing.B) { + tok := test.Tok + msg := message + d, err := New(tok) + if !assert.NoError(b, err) { + return + } + b.ReportAllocs() + for n := 0; n < b.N; n++ { + r, err := d.Dissect(msg) + 
assert.NoError(b, err) + results = r + } + }) + } +} diff --git a/libbeat/processors/dissect/docs/dissect.asciidoc b/libbeat/processors/dissect/docs/dissect.asciidoc index e8edaa822e1..b3dcf240c7e 100644 --- a/libbeat/processors/dissect/docs/dissect.asciidoc +++ b/libbeat/processors/dissect/docs/dissect.asciidoc @@ -11,7 +11,7 @@ The `dissect` processor tokenizes incoming strings using defined patterns. ------- processors: - dissect: - tokenizer: "%{key1} %{key2}" + tokenizer: "%{key1} %{key2} %{key3|convert_datatype}" field: "message" target_prefix: "dissect" ------- @@ -19,6 +19,8 @@ processors: The `dissect` processor has the following configuration settings: `tokenizer`:: The field used to define the *dissection* pattern. + An optional data type conversion can be specified after the key, using `|` as a separator, + to convert the value from string to integer, long, float, double, boolean, or ip. `field`:: (Optional) The event field to tokenize. Default is `message`. @@ -64,12 +66,12 @@ For this example, imagine that an application generates the following messages: [source,sh] ---- -"App01 - WebServer is starting" -"App01 - WebServer is up and running" -"App01 - WebServer is scaling 2 pods" -"App02 - Database is will be restarted in 5 minutes" -"App02 - Database is up and running" -"App02 - Database is refreshing tables" +"321 - App01 - WebServer is starting" +"321 - App01 - WebServer is up and running" +"321 - App01 - WebServer is scaling 2 pods" +"789 - App02 - Database will be restarted in 5 minutes" +"789 - App02 - Database is up and running" +"789 - App02 - Database is refreshing tables" ---- Use the `dissect` processor to split each message into two fields, for example, @@ -79,7 +81,7 @@ ---- processors: - dissect: - tokenizer: '"%{service.name} - %{service.status}"' + tokenizer: '"%{pid|integer} - %{service.name} - %{service.status}"' field: "message" target_prefix: "" ---- @@ -89,6 +91,7 @@ This configuration produces fields like: [source,json] ---- "service": { + "pid": 321, "name": "App01", "status": "WebServer is up and running" }, diff --git a/libbeat/processors/dissect/field.go b/libbeat/processors/dissect/field.go index bb92db0c18f..2c697ccf73d 100644 --- a/libbeat/processors/dissect/field.go +++ b/libbeat/processors/dissect/field.go @@ -29,6 +29,7 @@ type field interface { Ordinal() int Length() int Key() string + DataType() string ID() int Apply(b string, m Map) String() string @@ -37,11 +38,35 @@ type field interface { } type baseField struct { - id int - key string - ordinal int - length int - greedy bool + id int + key string + ordinal int + length int + greedy bool + dataType string +} + +type dataType uint8 + +// List of dataTypes. 
+const ( + Integer dataType = iota + Long + Float + Double + String + Boolean + IP +) + +var dataTypeNames = map[string]dataType{ + "integer": Integer, + "long": Long, + "float": Float, + "double": Double, + "string": String, + "boolean": Boolean, + "ip": IP, } func (f baseField) IsGreedy() bool { @@ -64,6 +89,10 @@ func (f baseField) Key() string { return f.key } +func (f baseField) DataType() string { + return f.dataType +} + func (f baseField) ID() int { return f.id } @@ -77,7 +106,7 @@ func (f baseField) IsFixedLength() bool { } func (f baseField) String() string { - return fmt.Sprintf("field: %s, ordinal: %d, greedy: %v", f.key, f.ordinal, f.IsGreedy()) + return fmt.Sprintf("field: %s, ordinal: %d, greedy: %v, dataType: %s", f.key, f.ordinal, f.IsGreedy(), f.DataType()) } // normalField is a simple key reference like this: `%{key}` @@ -204,7 +233,17 @@ func newField(id int, rawKey string, previous delimiter) (field, error) { return newSkipField(id), nil } - key, ordinal, length, greedy := extractKeyParts(rawKey) + key, dataType, ordinal, length, greedy := extractKeyParts(rawKey) + + // rawKey will have | as suffix when data type is missing + if strings.HasSuffix(rawKey, dataTypeIndicator) { + return nil, errMissingDatatype + } + if len(dataType) > 0 { + if _, ok := dataTypeNames[dataType]; !ok { + return nil, errInvalidDatatype + } + } // Conflicting prefix used. if strings.HasPrefix(key, appendIndirectPrefix) { @@ -228,9 +267,9 @@ func newField(id int, rawKey string, previous delimiter) (field, error) { } if strings.HasPrefix(key, indirectFieldPrefix) { - return newIndirectField(id, key[1:], length), nil + return newIndirectField(id, key[1:], dataType, length), nil } - return newNormalField(id, key, ordinal, length, greedy), nil + return newNormalField(id, key, dataType, ordinal, length, greedy), nil } func newSkipField(id int) skipField { @@ -262,29 +301,31 @@ func newAppendField(id int, key string, ordinal int, length int, greedy bool, pr } } -func newIndirectField(id int, key string, length int) indirectField { +func newIndirectField(id int, key string, dataType string, length int) indirectField { return indirectField{ baseField{ - id: id, - key: key, - length: length, + id: id, + key: key, + length: length, + dataType: dataType, }, } } -func newNormalField(id int, key string, ordinal int, length int, greedy bool) normalField { +func newNormalField(id int, key string, dataType string, ordinal int, length int, greedy bool) normalField { return normalField{ baseField{ - id: id, - key: key, - ordinal: ordinal, - length: length, - greedy: greedy, + id: id, + key: key, + ordinal: ordinal, + length: length, + greedy: greedy, + dataType: dataType, }, } } -func extractKeyParts(rawKey string) (key string, ordinal int, length int, greedy bool) { +func extractKeyParts(rawKey string) (key string, dataType string, ordinal int, length int, greedy bool) { m := suffixRE.FindAllStringSubmatch(rawKey, -1) if m[0][3] != "" { @@ -299,5 +340,7 @@ func extractKeyParts(rawKey string) (key string, ordinal int, length int, greedy greedy = true } - return m[0][1], ordinal, length, greedy + dataType = m[0][8] + + return m[0][1], dataType, ordinal, length, greedy } diff --git a/libbeat/processors/dissect/processor.go b/libbeat/processors/dissect/processor.go index 746c86ba6c6..b3e8ac9f635 100644 --- a/libbeat/processors/dissect/processor.go +++ b/libbeat/processors/dissect/processor.go @@ -61,7 +61,14 @@ func NewProcessor(c *common.Config) (processors.Processor, error) { // Run takes the event and will apply 
the tokenizer on the configured field. func (p *processor) Run(event *beat.Event) (*beat.Event, error) { - v, err := event.GetValue(p.config.Field) + var ( + m Map + mc MapConverted + v interface{} + err error + ) + + v, err = event.GetValue(p.config.Field) if err != nil { return event, err } @@ -71,7 +78,18 @@ func (p *processor) Run(event *beat.Event) (*beat.Event, error) { return event, fmt.Errorf("field is not a string, value: `%v`, field: `%s`", v, p.config.Field) } - m, err := p.config.Tokenizer.Dissect(s) + convertDataType := false + for _, f := range p.config.Tokenizer.parser.fields { + if f.DataType() != "" { + convertDataType = true + } + } + + if convertDataType { + mc, err = p.config.Tokenizer.DissectConvert(s) + } else { + m, err = p.config.Tokenizer.Dissect(s) + } if err != nil { if err := common.AddTagsWithKey( event.Fields, @@ -86,7 +104,11 @@ func (p *processor) Run(event *beat.Event) (*beat.Event, error) { return event, err } - event, err = p.mapper(event, mapToMapStr(m)) + if convertDataType { + event, err = p.mapper(event, mapInterfaceToMapStr(mc)) + } else { + event, err = p.mapper(event, mapToMapStr(m)) + } if err != nil { return event, err } @@ -132,3 +154,11 @@ func mapToMapStr(m Map) common.MapStr { } return newMap } + +func mapInterfaceToMapStr(m MapConverted) common.MapStr { + newMap := make(common.MapStr, len(m)) + for k, v := range m { + newMap[k] = v + } + return newMap +} diff --git a/libbeat/processors/dissect/processor_test.go b/libbeat/processors/dissect/processor_test.go index 919ec66bc90..5a3d0217021 100644 --- a/libbeat/processors/dissect/processor_test.go +++ b/libbeat/processors/dissect/processor_test.go @@ -410,3 +410,48 @@ func TestOverwriteKeys(t *testing.T) { }) } } + +func TestProcessorConvert(t *testing.T) { + tests := []struct { + name string + c map[string]interface{} + fields common.MapStr + values map[string]interface{} + }{ + { + name: "extract integer", + c: map[string]interface{}{"tokenizer": "userid=%{user_id|integer}"}, + fields: common.MapStr{"message": "userid=7736"}, + values: map[string]interface{}{"dissect.user_id": int32(7736)}, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + c, err := common.NewConfigFrom(test.c) + if !assert.NoError(t, err) { + return + } + + processor, err := NewProcessor(c) + if !assert.NoError(t, err) { + return + } + + e := beat.Event{Fields: test.fields} + newEvent, err := processor.Run(&e) + if !assert.NoError(t, err) { + return + } + + for field, value := range test.values { + v, err := newEvent.GetValue(field) + if !assert.NoError(t, err) { + return + } + + assert.Equal(t, value, v) + } + }) + } +} diff --git a/libbeat/processors/dissect/validate_test.go b/libbeat/processors/dissect/validate_test.go index dd19b688355..d2043dff054 100644 --- a/libbeat/processors/dissect/validate_test.go +++ b/libbeat/processors/dissect/validate_test.go @@ -32,7 +32,7 @@ func TestValidate(t *testing.T) { { name: "when we find reference field for all indirect field", p: &parser{ - fields: []field{newIndirectField(1, "hello", 0), newNormalField(0, "hola", 1, 0, false)}, + fields: []field{newIndirectField(1, "hello", "", 0), newNormalField(0, "hola", "", 1, 0, false)}, referenceFields: []field{newPointerField(2, "hello", 0)}, }, expectError: false, @@ -40,7 +40,7 @@ func TestValidate(t *testing.T) { { name: "when we cannot find all the reference field for all indirect field", p: &parser{ - fields: []field{newIndirectField(1, "hello", 0), newNormalField(0, "hola", 1, 0, false)}, + fields: 
[]field{newIndirectField(1, "hello", "", 0), newNormalField(0, "hola", "", 1, 0, false)}, referenceFields: []field{newPointerField(2, "okhello", 0)}, }, expectError: true,