From 4475519ea750590eaa319a25a05f449f867cf9df Mon Sep 17 00:00:00 2001 From: Jan Wozniak Date: Wed, 10 Apr 2024 14:03:54 +0200 Subject: [PATCH 1/5] declarative scaler config parsing Signed-off-by: Jan Wozniak --- CHANGELOG.md | 1 + pkg/scalers/scalersconfig/typed_config.go | 402 +++++++++++++++ .../scalersconfig/typed_config_test.go | 482 ++++++++++++++++++ 3 files changed, 885 insertions(+) create mode 100644 pkg/scalers/scalersconfig/typed_config.go create mode 100644 pkg/scalers/scalersconfig/typed_config_test.go diff --git a/CHANGELOG.md b/CHANGELOG.md index 787506b5079..961820a6343 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -59,6 +59,7 @@ To learn more about active deprecations, we recommend checking [GitHub Discussio ### New - TODO ([#XXX](https://github.com/kedacore/keda/issues/XXX)) +- **General**: Declarative scaler config parsing ([#5037](https://github.com/kedacore/keda/issues/5037)) #### Experimental diff --git a/pkg/scalers/scalersconfig/typed_config.go b/pkg/scalers/scalersconfig/typed_config.go new file mode 100644 index 00000000000..23694edcdcb --- /dev/null +++ b/pkg/scalers/scalersconfig/typed_config.go @@ -0,0 +1,402 @@ +/* +Copyright 2024 The KEDA Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package scalersconfig + +import ( + "encoding/json" + "errors" + "fmt" + "net/url" + "reflect" + "runtime/debug" + "strconv" + "strings" +) + +// CustomValidator is an interface that can be implemented to validate the configuration of the typed config +type CustomValidator interface { + Validate() error +} + +// ParsingOrder is a type that represents the order in which the parameters are parsed +type ParsingOrder string + +// Constants that represent the order in which the parameters are parsed +const ( + TriggerMetadata ParsingOrder = "triggerMetadata" + ResolvedEnv ParsingOrder = "resolvedEnv" + AuthParams ParsingOrder = "authParams" +) + +// separators for field tag structure +// e.g. 
name=stringVal,parsingOrder=triggerMetadata;resolvedEnv;authParams,optional
+const (
+	tagSeparator      = ","
+	tagKeySeparator   = "="
+	tagValueSeparator = ";"
+)
+
+// separators for map and slice elements
+const (
+	elemSeparator       = ","
+	elemKeyValSeparator = "="
+)
+
+// field tag parameters
+const (
+	optionalTag     = "optional"
+	deprecatedTag   = "deprecated"
+	defaultTag      = "default"
+	parsingOrderTag = "parsingOrder"
+	nameTag         = "name"
+	enumTag         = "enum"
+	exclusiveTag    = "exclusive"
+)
+
+// Params is a struct that represents the parameter list that can be used in the keda tag
+type Params struct {
+	// FieldName is the name of the field in the struct
+	FieldName string
+
+	// Name is the 'name' tag parameter defining the key in triggerMetadata, resolvedEnv or authParams
+	Name string
+
+	// Optional is the 'optional' tag parameter defining if the parameter is optional
+	Optional bool
+
+	// ParsingOrder is the 'parsingOrder' tag parameter defining the order in which the parameter is looked up
+	// in the triggerMetadata, resolvedEnv or authParams maps
+	ParsingOrder []ParsingOrder
+
+	// Default is the 'default' tag parameter defining the default value of the parameter if it's not found
+	// in any of the maps from ParsingOrder
+	Default string
+
+	// Deprecated is the 'deprecated' tag parameter; if the map contains this parameter, it is considered
+	// an error and the DeprecatedMessage should be returned to the user
+	Deprecated string
+
+	// Enum is the 'enum' tag parameter defining the list of possible values for the parameter
+	Enum []string
+
+	// Exclusive is the 'exclusive' tag parameter defining the list of values that are mutually exclusive
+	Exclusive []string
+}
+
+// IsNested is a function that returns true if the parameter is nested
+func (p Params) IsNested() bool {
+	return p.Name == ""
+}
+
+// IsDeprecated is a function that returns true if the parameter is deprecated
+func (p Params) IsDeprecated() bool {
+	return p.Deprecated != ""
+}
+
+// DeprecatedMessage is a function that returns the optional deprecated message if the parameter is deprecated
+func (p Params) DeprecatedMessage() string {
+	if p.Deprecated == deprecatedTag {
+		return ""
+	}
+	return fmt.Sprintf(": %s", p.Deprecated)
+}
+
+// TypedConfig is a function that is used to unmarshal the TriggerMetadata, ResolvedEnv and AuthParams
+// populating the provided typedConfig where structure fields along with complementary field tags define
+// declaratively the parsing rules
+func (sc *ScalerConfig) TypedConfig(typedConfig any) (err error) {
+	defer func() {
+		if r := recover(); r != nil {
+			// this shouldn't happen, but calling certain reflection functions may result in panic
+			// if it does, it's better to return an error with stacktrace and reject parsing config
+			// rather than crashing KEDA
+			err = fmt.Errorf("failed to parse typed config %T resulted in panic\n%v", r, debug.Stack())
+		}
+	}()
+	err = sc.parseTypedConfig(typedConfig, false)
+	return
+}
+
+// parseTypedConfig is a function that is used to unmarshal the TriggerMetadata, ResolvedEnv and AuthParams
+// this can be called recursively to parse nested structures
+func (sc *ScalerConfig) parseTypedConfig(typedConfig any, parentOptional bool) error {
+	t := reflect.TypeOf(typedConfig)
+	if t.Kind() != reflect.Pointer {
+		return fmt.Errorf("typedConfig must be a pointer")
+	}
+	t = t.Elem()
+	v := reflect.ValueOf(typedConfig).Elem()
+
+	errs := []error{}
+	for i := 0; i < t.NumField(); i++ {
+		fieldType := t.Field(i)
+		fieldValue := v.Field(i)
+		tag, 
exists := fieldType.Tag.Lookup("keda") + if !exists { + continue + } + tagParams, err := paramsFromTag(tag, fieldType) + if err != nil { + errs = append(errs, err) + continue + } + tagParams.Optional = tagParams.Optional || parentOptional + if err := sc.setValue(fieldValue, tagParams); err != nil { + errs = append(errs, err) + } + } + if validator, ok := typedConfig.(CustomValidator); ok { + if err := validator.Validate(); err != nil { + errs = append(errs, err) + } + } + return errors.Join(errs...) +} + +// setValue is a function that sets the value of the field based on the provided params +func (sc *ScalerConfig) setValue(field reflect.Value, params Params) error { + valFromConfig, exists := sc.configParamValue(params) + if exists && params.IsDeprecated() { + return fmt.Errorf("parameter %q is deprecated%v", params.Name, params.DeprecatedMessage()) + } + if !exists && params.Default != "" { + exists = true + valFromConfig = params.Default + } + if !exists && (params.Optional || params.IsDeprecated()) { + return nil + } + if !exists && !(params.Optional || params.IsDeprecated()) { + return fmt.Errorf("missing required parameter %q in %v", params.Name, params.ParsingOrder) + } + if params.Enum != nil { + enumMap := make(map[string]bool) + for _, e := range params.Enum { + enumMap[e] = true + } + missingMap := make(map[string]bool) + split := strings.Split(valFromConfig, elemSeparator) + for _, s := range split { + s := strings.TrimSpace(s) + if !enumMap[s] { + missingMap[s] = true + } + } + if len(missingMap) > 0 { + return fmt.Errorf("parameter %q value %q must be one of %v", params.Name, valFromConfig, params.Enum) + } + } + if params.Exclusive != nil { + exclusiveMap := make(map[string]bool) + for _, e := range params.Exclusive { + exclusiveMap[e] = true + } + split := strings.Split(valFromConfig, elemSeparator) + exclusiveCount := 0 + for _, s := range split { + s := strings.TrimSpace(s) + if exclusiveMap[s] { + exclusiveCount++ + } + } + if exclusiveCount > 1 { + return fmt.Errorf("parameter %q value %q must contain only one of %v", params.Name, valFromConfig, params.Exclusive) + } + } + if params.IsNested() { + for field.Kind() == reflect.Ptr { + field.Set(reflect.New(field.Type().Elem())) + field = field.Elem() + } + if field.Kind() != reflect.Struct { + return fmt.Errorf("nested parameter %q must be a struct, has kind %q", params.FieldName, field.Kind()) + } + return sc.parseTypedConfig(field.Addr().Interface(), params.Optional) + } + if err := setConfigValueHelper(valFromConfig, field); err != nil { + return fmt.Errorf("unable to set param %q value %q: %w", params.Name, valFromConfig, err) + } + return nil +} + +// setConfigValueURLParams is a function that sets the value of the url.Values field +func setConfigValueURLParams(valFromConfig string, field reflect.Value) error { + field.Set(reflect.MakeMap(reflect.MapOf(field.Type().Key(), field.Type().Elem()))) + vals, err := url.ParseQuery(valFromConfig) + if err != nil { + return fmt.Errorf("expected url.Values, unable to parse query %q: %w", valFromConfig, err) + } + for k, vs := range vals { + ifcMapKeyElem := reflect.New(field.Type().Key()).Elem() + ifcMapValueElem := reflect.New(field.Type().Elem()).Elem() + if err := setConfigValueHelper(k, ifcMapKeyElem); err != nil { + return fmt.Errorf("map key %q: %w", k, err) + } + for _, v := range vs { + ifcMapValueElem.Set(reflect.Append(ifcMapValueElem, reflect.ValueOf(v))) + } + field.SetMapIndex(ifcMapKeyElem, ifcMapValueElem) + } + return nil +} + +// setConfigValueMap is a 
function that sets the value of the map field
+func setConfigValueMap(valFromConfig string, field reflect.Value) error {
+	field.Set(reflect.MakeMap(reflect.MapOf(field.Type().Key(), field.Type().Elem())))
+	split := strings.Split(valFromConfig, elemSeparator)
+	for _, s := range split {
+		s := strings.TrimSpace(s)
+		kv := strings.Split(s, elemKeyValSeparator)
+		if len(kv) != 2 {
+			return fmt.Errorf("expected format key%vvalue, got %q", elemKeyValSeparator, s)
+		}
+		key := strings.TrimSpace(kv[0])
+		val := strings.TrimSpace(kv[1])
+		ifcKeyElem := reflect.New(field.Type().Key()).Elem()
+		if err := setConfigValueHelper(key, ifcKeyElem); err != nil {
+			return fmt.Errorf("map key %q: %w", key, err)
+		}
+		ifcValueElem := reflect.New(field.Type().Elem()).Elem()
+		if err := setConfigValueHelper(val, ifcValueElem); err != nil {
+			return fmt.Errorf("map key %q, value %q: %w", key, val, err)
+		}
+		field.SetMapIndex(ifcKeyElem, ifcValueElem)
+	}
+	return nil
+}
+
+// setConfigValueSlice is a function that sets the value of the slice field
+func setConfigValueSlice(valFromConfig string, field reflect.Value) error {
+	elemIfc := reflect.New(field.Type().Elem()).Interface()
+	split := strings.Split(valFromConfig, elemSeparator)
+	for i, s := range split {
+		s := strings.TrimSpace(s)
+		if err := setConfigValueHelper(s, reflect.ValueOf(elemIfc).Elem()); err != nil {
+			return fmt.Errorf("slice element %d: %w", i, err)
+		}
+		field.Set(reflect.Append(field, reflect.ValueOf(elemIfc).Elem()))
+	}
+	return nil
+}
+
+// setConfigValueHelper is a function that sets the value of the parameter
+func setConfigValueHelper(valFromConfig string, field reflect.Value) error {
+	paramValue := reflect.ValueOf(valFromConfig)
+	if paramValue.Type().AssignableTo(field.Type()) {
+		field.SetString(valFromConfig)
+		return nil
+	}
+	if paramValue.Type().ConvertibleTo(field.Type()) {
+		field.Set(paramValue.Convert(field.Type()))
+		return nil
+	}
+	if field.Type() == reflect.TypeOf(url.Values{}) {
+		return setConfigValueURLParams(valFromConfig, field)
+	}
+	if field.Kind() == reflect.Map {
+		return setConfigValueMap(valFromConfig, field)
+	}
+	if field.Kind() == reflect.Slice {
+		return setConfigValueSlice(valFromConfig, field)
+	}
+	if field.CanInterface() {
+		ifc := reflect.New(field.Type()).Interface()
+		if err := json.Unmarshal([]byte(valFromConfig), &ifc); err != nil {
+			return fmt.Errorf("unable to unmarshal to field type %v: %w", field.Type(), err)
+		}
+		field.Set(reflect.ValueOf(ifc).Elem())
+		return nil
+	}
+	return fmt.Errorf("unable to find matching parser for field type %v", field.Type())
+}
+
+// configParamValue is a function that returns the value of the parameter based on the parsing order
+func (sc *ScalerConfig) configParamValue(params Params) (string, bool) {
+	for _, po := range params.ParsingOrder {
+		var m map[string]string
+		key := params.Name
+		switch po {
+		case TriggerMetadata:
+			m = sc.TriggerMetadata
+		case AuthParams:
+			m = sc.AuthParams
+		case ResolvedEnv:
+			m = sc.ResolvedEnv
+			key = sc.TriggerMetadata[fmt.Sprintf("%sFromEnv", params.Name)]
+		default:
+			m = sc.TriggerMetadata
+		}
+		if param, ok := m[key]; ok && param != "" {
+			return strings.TrimSpace(param), true
+		}
+	}
+	return "", params.IsNested()
+}
+
+// paramsFromTag is a function that returns the Params struct based on the field tag
+func paramsFromTag(tag string, field reflect.StructField) (Params, error) {
+	params := Params{FieldName: field.Name}
+	tagSplit := strings.Split(tag, tagSeparator)
+	for _, ts := range tagSplit {
+		tsplit := 
strings.Split(ts, tagKeySeparator) + tsplit[0] = strings.TrimSpace(tsplit[0]) + switch tsplit[0] { + case optionalTag: + if len(tsplit) == 1 { + params.Optional = true + } + if len(tsplit) > 1 { + params.Optional, _ = strconv.ParseBool(strings.TrimSpace(tsplit[1])) + } + case parsingOrderTag: + if len(tsplit) > 1 { + parsingOrder := strings.Split(tsplit[1], tagValueSeparator) + for _, po := range parsingOrder { + poTyped := ParsingOrder(strings.TrimSpace(po)) + params.ParsingOrder = append(params.ParsingOrder, poTyped) + } + } + case nameTag: + if len(tsplit) > 1 { + params.Name = strings.TrimSpace(tsplit[1]) + } + case deprecatedTag: + if len(tsplit) == 1 { + params.Deprecated = deprecatedTag + } else { + params.Deprecated = strings.TrimSpace(tsplit[1]) + } + case defaultTag: + if len(tsplit) > 1 { + params.Default = strings.TrimSpace(tsplit[1]) + } + case enumTag: + if len(tsplit) > 1 { + params.Enum = strings.Split(tsplit[1], tagValueSeparator) + } + case exclusiveTag: + if len(tsplit) > 1 { + params.Exclusive = strings.Split(tsplit[1], tagValueSeparator) + } + case "": + continue + default: + return params, fmt.Errorf("unknown tag param %s: %s", tsplit[0], tag) + } + } + return params, nil +} diff --git a/pkg/scalers/scalersconfig/typed_config_test.go b/pkg/scalers/scalersconfig/typed_config_test.go new file mode 100644 index 00000000000..c9f1e4f75e1 --- /dev/null +++ b/pkg/scalers/scalersconfig/typed_config_test.go @@ -0,0 +1,482 @@ +/* +Copyright 2024 The KEDA Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package scalersconfig + +import ( + "net/url" + "testing" + + . 
"github.com/onsi/gomega" +) + +// TestBasicTypedConfig tests the basic types for typed config +func TestBasicTypedConfig(t *testing.T) { + RegisterTestingT(t) + + sc := &ScalerConfig{ + TriggerMetadata: map[string]string{ + "stringVal": "value1", + "intVal": "1", + "boolValFromEnv": "boolVal", + "floatValFromEnv": "floatVal", + }, + ResolvedEnv: map[string]string{ + "boolVal": "true", + "floatVal": "1.1", + }, + AuthParams: map[string]string{ + "auth": "authValue", + }, + } + + type testStruct struct { + StringVal string `keda:"name=stringVal, parsingOrder=triggerMetadata"` + IntVal int `keda:"name=intVal, parsingOrder=triggerMetadata"` + BoolVal bool `keda:"name=boolVal, parsingOrder=resolvedEnv"` + FloatVal float64 `keda:"name=floatVal, parsingOrder=resolvedEnv"` + AuthVal string `keda:"name=auth, parsingOrder=authParams"` + } + + ts := testStruct{} + err := sc.TypedConfig(&ts) + Expect(err).To(BeNil()) + + Expect(ts.StringVal).To(Equal("value1")) + Expect(ts.IntVal).To(Equal(1)) + Expect(ts.BoolVal).To(BeTrue()) + Expect(ts.FloatVal).To(Equal(1.1)) + Expect(ts.AuthVal).To(Equal("authValue")) +} + +// TestParsingOrder tests the parsing order +func TestParsingOrder(t *testing.T) { + RegisterTestingT(t) + + sc := &ScalerConfig{ + TriggerMetadata: map[string]string{ + "stringVal": "value1", + "intVal": "1", + "intValFromEnv": "intVal", + "floatVal": "1.1", + "floatValFromEnv": "floatVal", + }, + ResolvedEnv: map[string]string{ + "stringVal": "value2", + "intVal": "2", + "floatVal": "2.2", + }, + } + + type testStruct struct { + StringVal string `keda:"name=stringVal, parsingOrder=resolvedEnv;triggerMetadata"` + IntVal int `keda:"name=intVal, parsingOrder=triggerMetadata;resolvedEnv"` + FloatVal float64 `keda:"name=floatVal, parsingOrder=resolvedEnv;triggerMetadata"` + } + + ts := testStruct{} + err := sc.TypedConfig(&ts) + Expect(err).To(BeNil()) + + Expect(ts.StringVal).To(Equal("value1")) + Expect(ts.IntVal).To(Equal(1)) + Expect(ts.FloatVal).To(Equal(2.2)) +} + +// TestOptional tests the optional tag +func TestOptional(t *testing.T) { + RegisterTestingT(t) + + sc := &ScalerConfig{ + TriggerMetadata: map[string]string{ + "stringVal": "value1", + }, + } + + type testStruct struct { + StringVal string `keda:"name=stringVal, parsingOrder=triggerMetadata"` + IntValOptional int `keda:"name=intVal, parsingOrder=triggerMetadata, optional"` + IntValAlsoOptional int `keda:"name=intVal, parsingOrder=triggerMetadata, optional=true"` + } + + ts := testStruct{} + err := sc.TypedConfig(&ts) + Expect(err).To(BeNil()) + + Expect(ts.StringVal).To(Equal("value1")) + Expect(ts.IntValOptional).To(Equal(0)) + Expect(ts.IntValAlsoOptional).To(Equal(0)) +} + +// TestMissing tests the missing parameter for compulsory tag +func TestMissing(t *testing.T) { + RegisterTestingT(t) + + sc := &ScalerConfig{} + + type testStruct struct { + StringVal string `keda:"name=stringVal, parsingOrder=triggerMetadata"` + } + + ts := testStruct{} + err := sc.TypedConfig(&ts) + Expect(err).To(MatchError(`missing required parameter "stringVal" in [triggerMetadata]`)) +} + +// TestDeprecated tests the deprecated tag +func TestDeprecated(t *testing.T) { + RegisterTestingT(t) + + sc := &ScalerConfig{ + TriggerMetadata: map[string]string{ + "stringVal": "value1", + }, + } + + type testStruct struct { + StringVal string `keda:"name=stringVal, parsingOrder=triggerMetadata, deprecated=deprecated"` + } + + ts := testStruct{} + err := sc.TypedConfig(&ts) + Expect(err).To(MatchError(`parameter "stringVal" is deprecated`)) + + sc2 := 
&ScalerConfig{ + TriggerMetadata: map[string]string{}, + } + + ts2 := testStruct{} + err = sc2.TypedConfig(&ts2) + Expect(err).To(BeNil()) +} + +// TestDefaultValue tests the default tag +func TestDefaultValue(t *testing.T) { + RegisterTestingT(t) + + sc := &ScalerConfig{ + TriggerMetadata: map[string]string{ + "stringVal": "value1", + }, + } + + type testStruct struct { + BoolVal bool `keda:"name=boolVal, parsingOrder=triggerMetadata, optional, default=true"` + StringVal string `keda:"name=stringVal, parsingOrder=triggerMetadata, optional, default=d"` + StringVal2 string `keda:"name=stringVal2, parsingOrder=triggerMetadata, optional, default=d"` + } + + ts := testStruct{} + err := sc.TypedConfig(&ts) + Expect(err).To(BeNil()) + + Expect(ts.BoolVal).To(Equal(true)) + Expect(ts.StringVal).To(Equal("value1")) + Expect(ts.StringVal2).To(Equal("d")) +} + +// TestMap tests the map type +func TestMap(t *testing.T) { + RegisterTestingT(t) + + sc := &ScalerConfig{ + TriggerMetadata: map[string]string{ + "mapVal": "key1=1,key2=2", + }, + } + + type testStruct struct { + MapVal map[string]int `keda:"name=mapVal, parsingOrder=triggerMetadata"` + } + + ts := testStruct{} + err := sc.TypedConfig(&ts) + Expect(err).To(BeNil()) + Expect(ts.MapVal).To(HaveLen(2)) + Expect(ts.MapVal["key1"]).To(Equal(1)) + Expect(ts.MapVal["key2"]).To(Equal(2)) +} + +// TestSlice tests the slice type +func TestSlice(t *testing.T) { + RegisterTestingT(t) + + sc := &ScalerConfig{ + TriggerMetadata: map[string]string{ + "sliceVal": "1,2,3", + "sliceValWithSpaces": "1, 2, 3", + }, + } + + type testStruct struct { + SliceVal []int `keda:"name=sliceVal, parsingOrder=triggerMetadata"` + SliceValWithSpaces []int `keda:"name=sliceValWithSpaces, parsingOrder=triggerMetadata"` + } + + ts := testStruct{} + err := sc.TypedConfig(&ts) + Expect(err).To(BeNil()) + Expect(ts.SliceVal).To(HaveLen(3)) + Expect(ts.SliceVal[0]).To(Equal(1)) + Expect(ts.SliceVal[1]).To(Equal(2)) + Expect(ts.SliceVal[2]).To(Equal(3)) + Expect(ts.SliceValWithSpaces).To(HaveLen(3)) + Expect(ts.SliceValWithSpaces[0]).To(Equal(1)) + Expect(ts.SliceValWithSpaces[1]).To(Equal(2)) + Expect(ts.SliceValWithSpaces[2]).To(Equal(3)) +} + +// TestEnum tests the enum type +func TestEnum(t *testing.T) { + RegisterTestingT(t) + + sc := &ScalerConfig{ + TriggerMetadata: map[string]string{ + "enumVal": "value1", + "enumSlice": "value1, value2", + }, + } + + type testStruct struct { + EnumVal string `keda:"name=enumVal, parsingOrder=triggerMetadata, enum=value1;value2"` + EnumSlice []string `keda:"name=enumSlice, parsingOrder=triggerMetadata, enum=value1;value2, optional"` + } + + ts := testStruct{} + err := sc.TypedConfig(&ts) + Expect(err).To(BeNil()) + Expect(ts.EnumVal).To(Equal("value1")) + Expect(ts.EnumSlice).To(HaveLen(2)) + Expect(ts.EnumSlice).To(ConsistOf("value1", "value2")) + + sc2 := &ScalerConfig{ + TriggerMetadata: map[string]string{ + "enumVal": "value3", + }, + } + + ts2 := testStruct{} + err = sc2.TypedConfig(&ts2) + Expect(err).To(MatchError(`parameter "enumVal" value "value3" must be one of [value1 value2]`)) +} + +// TestExclusive tests the exclusive type +func TestExclusive(t *testing.T) { + RegisterTestingT(t) + + type testStruct struct { + IntVal []int `keda:"name=intVal, parsingOrder=triggerMetadata, exclusive=1;4;5"` + } + + sc := &ScalerConfig{ + TriggerMetadata: map[string]string{ + "intVal": "1,2,3", + }, + } + + ts := testStruct{} + err := sc.TypedConfig(&ts) + Expect(err).To(BeNil()) + + sc2 := &ScalerConfig{ + TriggerMetadata: map[string]string{ + 
"intVal": "1,4", + }, + } + + ts2 := testStruct{} + err = sc2.TypedConfig(&ts2) + Expect(err).To(MatchError(`parameter "intVal" value "1,4" must contain only one of [1 4 5]`)) +} + +// TestURLValues tests the url.Values type +func TestURLValues(t *testing.T) { + RegisterTestingT(t) + + sc := &ScalerConfig{ + AuthParams: map[string]string{ + "endpointParams": "key1=value1&key2=value2&key1=value3", + }, + } + + type testStruct struct { + EndpointParams url.Values `keda:"name=endpointParams, parsingOrder=authParams"` + } + + ts := testStruct{} + err := sc.TypedConfig(&ts) + Expect(err).To(BeNil()) + Expect(ts.EndpointParams).To(HaveLen(2)) + Expect(ts.EndpointParams).To(HaveKey("key1")) + Expect(ts.EndpointParams).To(HaveKey("key2")) + Expect(ts.EndpointParams["key1"]).To(ConsistOf("value1", "value3")) + Expect(ts.EndpointParams["key2"]).To(ConsistOf("value2")) +} + +// TestGenericMap tests the generic map type that is structurally similar to url.Values +func TestGenericMap(t *testing.T) { + RegisterTestingT(t) + + sc := &ScalerConfig{ + AuthParams: map[string]string{ + "endpointParams": "key1=value1,key2=value2,key3=value3", + }, + } + + // structurally similar to url.Values but should behave as generic map + type testStruct struct { + EndpointParams map[string][]string `keda:"name=endpointParams, parsingOrder=authParams"` + } + + ts := testStruct{} + err := sc.TypedConfig(&ts) + Expect(err).To(BeNil()) + Expect(ts.EndpointParams).To(HaveLen(3)) + Expect(ts.EndpointParams).To(HaveKey("key1")) + Expect(ts.EndpointParams).To(HaveKey("key2")) + Expect(ts.EndpointParams).To(HaveKey("key3")) + Expect(ts.EndpointParams["key1"]).To(ConsistOf("value1")) + Expect(ts.EndpointParams["key2"]).To(ConsistOf("value2")) + Expect(ts.EndpointParams["key3"]).To(ConsistOf("value3")) +} + +// TestNestedStruct tests the nested struct type +func TestNestedStruct(t *testing.T) { + RegisterTestingT(t) + + sc := &ScalerConfig{ + AuthParams: map[string]string{ + "username": "user", + "password": "pass", + }, + } + + type basicAuth struct { + Username string `keda:"name=username, parsingOrder=authParams"` + Password string `keda:"name=password, parsingOrder=authParams"` + } + + type testStruct struct { + BA basicAuth `keda:""` + } + + ts := testStruct{} + err := sc.TypedConfig(&ts) + Expect(err).To(BeNil()) + Expect(ts.BA.Username).To(Equal("user")) + Expect(ts.BA.Password).To(Equal("pass")) +} + +// TestEmbeddedStruct tests the embedded struct type +func TestEmbeddedStruct(t *testing.T) { + RegisterTestingT(t) + + sc := &ScalerConfig{ + AuthParams: map[string]string{ + "username": "user", + "password": "pass", + }, + } + + type testStruct struct { + BasicAuth struct { + Username string `keda:"name=username, parsingOrder=authParams"` + Password string `keda:"name=password, parsingOrder=authParams"` + } `keda:""` + } + + ts := testStruct{} + err := sc.TypedConfig(&ts) + Expect(err).To(BeNil()) + Expect(ts.BasicAuth.Username).To(Equal("user")) + Expect(ts.BasicAuth.Password).To(Equal("pass")) +} + +// TestWrongNestedStruct tests the wrong nested type +func TestWrongNestedStruct(t *testing.T) { + RegisterTestingT(t) + + sc := &ScalerConfig{ + AuthParams: map[string]string{ + "username": "user", + "password": "pass", + }, + } + + type testStruct struct { + WrongNesting int `keda:""` + } + + ts := testStruct{} + err := sc.TypedConfig(&ts) + Expect(err).To(MatchError(`nested parameter "WrongNesting" must be a struct, has kind "int"`)) +} + +// TestNestedOptional tests the nested optional type +func TestNestedOptional(t 
*testing.T) { + RegisterTestingT(t) + + sc := &ScalerConfig{ + AuthParams: map[string]string{ + "username": "user", + }, + } + + type basicAuth struct { + Username string `keda:"name=username, parsingOrder=authParams"` + Password string `keda:"name=password, parsingOrder=authParams, optional"` + AlsoOptionalThanksToParent string `keda:"name=optional, parsingOrder=authParams"` + } + + type testStruct struct { + BA basicAuth `keda:"optional"` + } + + ts := testStruct{} + err := sc.TypedConfig(&ts) + Expect(err).To(BeNil()) + Expect(ts.BA.Username).To(Equal("user")) + Expect(ts.BA.Password).To(Equal("")) + Expect(ts.BA.AlsoOptionalThanksToParent).To(Equal("")) +} + +// TestNestedPointer tests the nested pointer type +func TestNestedPointer(t *testing.T) { + RegisterTestingT(t) + + sc := &ScalerConfig{ + AuthParams: map[string]string{ + "username": "user", + "password": "pass", + }, + } + + type basicAuth struct { + Username string `keda:"name=username, parsingOrder=authParams"` + Password string `keda:"name=password, parsingOrder=authParams"` + } + + type testStruct struct { + BA *basicAuth `keda:""` + } + + ts := testStruct{} + err := sc.TypedConfig(&ts) + Expect(err).To(BeNil()) + Expect(ts.BA).ToNot(BeNil()) + Expect(ts.BA.Username).To(Equal("user")) + Expect(ts.BA.Password).To(Equal("pass")) +} From d5d0b102de8f2862f0fa9e4022a435693a016565 Mon Sep 17 00:00:00 2001 From: Jan Wozniak Date: Wed, 10 Apr 2024 14:04:35 +0200 Subject: [PATCH 2/5] reimplement prometheus config parsing Signed-off-by: Jan Wozniak --- pkg/scalers/prometheus_scaler.go | 148 ++++++-------------------- pkg/scalers/prometheus_scaler_test.go | 18 ++-- 2 files changed, 42 insertions(+), 124 deletions(-) diff --git a/pkg/scalers/prometheus_scaler.go b/pkg/scalers/prometheus_scaler.go index b46cf271501..9c7bddb6702 100644 --- a/pkg/scalers/prometheus_scaler.go +++ b/pkg/scalers/prometheus_scaler.go @@ -38,10 +38,6 @@ const ( unsafeSsl = "unsafeSsl" ) -var ( - defaultIgnoreNullValues = true -) - type prometheusScaler struct { metricType v2.MetricTargetType metadata *prometheusMetadata @@ -49,22 +45,24 @@ type prometheusScaler struct { logger logr.Logger } +// sometimes should consider there is an error we can accept +// default value is true/t, to ignore the null value return from prometheus +// change to false/f if can not accept prometheus return null values +// https://github.com/kedacore/keda/issues/3065 type prometheusMetadata struct { - serverAddress string - query string - queryParameters map[string]string - threshold float64 - activationThreshold float64 - prometheusAuth *authentication.AuthMeta - namespace string - triggerIndex int - customHeaders map[string]string - // sometimes should consider there is an error we can accept - // default value is true/t, to ignore the null value return from prometheus - // change to false/f if can not accept prometheus return null values - // https://github.com/kedacore/keda/issues/3065 - ignoreNullValues bool - unsafeSsl bool + prometheusAuth *authentication.AuthMeta + triggerIndex int + + ServerAddress string `keda:"name=serverAddress, parsingOrder=triggerMetadata"` + Query string `keda:"name=query, parsingOrder=triggerMetadata"` + QueryParameters map[string]string `keda:"name=queryParameters, parsingOrder=triggerMetadata, optional"` + Threshold float64 `keda:"name=threshold, parsingOrder=triggerMetadata"` + ActivationThreshold float64 `keda:"name=activationThreshold, parsingOrder=triggerMetadata, optional"` + Namespace string `keda:"name=namespace, parsingOrder=triggerMetadata, 
optional"` + CustomHeaders map[string]string `keda:"name=customHeaders, parsingOrder=triggerMetadata, optional"` + IgnoreNullValues bool `keda:"name=ignoreNullValues, parsingOrder=triggerMetadata, optional, default=true"` + UnsafeSSL bool `keda:"name=unsafeSsl, parsingOrder=triggerMetadata, optional"` + CortexOrgID string `keda:"name=cortexOrgID, parsingOrder=triggerMetadata, optional, deprecated=use customHeaders instead"` } type promQueryResult struct { @@ -93,7 +91,7 @@ func NewPrometheusScaler(config *scalersconfig.ScalerConfig) (Scaler, error) { return nil, fmt.Errorf("error parsing prometheus metadata: %w", err) } - httpClient := kedautil.CreateHTTPClient(config.GlobalHTTPTimeout, meta.unsafeSsl) + httpClient := kedautil.CreateHTTPClient(config.GlobalHTTPTimeout, meta.UnsafeSSL) if meta.prometheusAuth != nil { if meta.prometheusAuth.CA != "" || meta.prometheusAuth.EnableTLS { @@ -153,91 +151,11 @@ func NewPrometheusScaler(config *scalersconfig.ScalerConfig) (Scaler, error) { func parsePrometheusMetadata(config *scalersconfig.ScalerConfig) (meta *prometheusMetadata, err error) { meta = &prometheusMetadata{} - - if val, ok := config.TriggerMetadata[promServerAddress]; ok && val != "" { - meta.serverAddress = val - } else { - return nil, fmt.Errorf("no %s given", promServerAddress) - } - - if val, ok := config.TriggerMetadata[promQuery]; ok && val != "" { - meta.query = val - } else { - return nil, fmt.Errorf("no %s given", promQuery) - } - - if val, ok := config.TriggerMetadata[promQueryParameters]; ok && val != "" { - queryParameters, err := kedautil.ParseStringList(val) - if err != nil { - return nil, fmt.Errorf("error parsing %s: %w", promQueryParameters, err) - } - - meta.queryParameters = queryParameters - } - - if val, ok := config.TriggerMetadata[promThreshold]; ok && val != "" { - t, err := strconv.ParseFloat(val, 64) - if err != nil { - return nil, fmt.Errorf("error parsing %s: %w", promThreshold, err) - } - - meta.threshold = t - } else { - if config.AsMetricSource { - meta.threshold = 0 - } else { - return nil, fmt.Errorf("no %s given", promThreshold) - } - } - - meta.activationThreshold = 0 - if val, ok := config.TriggerMetadata[promActivationThreshold]; ok { - t, err := strconv.ParseFloat(val, 64) - if err != nil { - return nil, fmt.Errorf("activationThreshold parsing error %w", err) - } - - meta.activationThreshold = t - } - - if val, ok := config.TriggerMetadata[promNamespace]; ok && val != "" { - meta.namespace = val - } - - if val, ok := config.TriggerMetadata[promCortexScopeOrgID]; ok && val != "" { - return nil, fmt.Errorf("cortexOrgID is deprecated, please use customHeaders instead") - } - - if val, ok := config.TriggerMetadata[promCustomHeaders]; ok && val != "" { - customHeaders, err := kedautil.ParseStringList(val) - if err != nil { - return nil, fmt.Errorf("error parsing %s: %w", promCustomHeaders, err) - } - - meta.customHeaders = customHeaders - } - - meta.ignoreNullValues = defaultIgnoreNullValues - if val, ok := config.TriggerMetadata[ignoreNullValues]; ok && val != "" { - ignoreNullValues, err := strconv.ParseBool(val) - if err != nil { - return nil, fmt.Errorf("err incorrect value for ignoreNullValues given: %s, please use true or false", val) - } - meta.ignoreNullValues = ignoreNullValues - } - - meta.unsafeSsl = false - if val, ok := config.TriggerMetadata[unsafeSsl]; ok && val != "" { - unsafeSslValue, err := strconv.ParseBool(val) - if err != nil { - return nil, fmt.Errorf("error parsing %s: %w", unsafeSsl, err) - } - - meta.unsafeSsl = unsafeSslValue 
+ if err := config.TypedConfig(meta); err != nil { + return nil, fmt.Errorf("error parsing prometheus metadata: %w", err) } meta.triggerIndex = config.TriggerIndex - err = parseAuthConfig(config, meta) if err != nil { return nil, err @@ -274,7 +192,7 @@ func (s *prometheusScaler) GetMetricSpecForScaling(context.Context) []v2.MetricS Metric: v2.MetricIdentifier{ Name: GenerateMetricNameWithIndex(s.metadata.triggerIndex, metricName), }, - Target: GetMetricTargetMili(s.metricType, s.metadata.threshold), + Target: GetMetricTargetMili(s.metricType, s.metadata.Threshold), } metricSpec := v2.MetricSpec{ External: externalMetric, Type: externalMetricType, @@ -284,15 +202,15 @@ func (s *prometheusScaler) GetMetricSpecForScaling(context.Context) []v2.MetricS func (s *prometheusScaler) ExecutePromQuery(ctx context.Context) (float64, error) { t := time.Now().UTC().Format(time.RFC3339) - queryEscaped := url_pkg.QueryEscape(s.metadata.query) - url := fmt.Sprintf("%s/api/v1/query?query=%s&time=%s", s.metadata.serverAddress, queryEscaped, t) + queryEscaped := url_pkg.QueryEscape(s.metadata.Query) + url := fmt.Sprintf("%s/api/v1/query?query=%s&time=%s", s.metadata.ServerAddress, queryEscaped, t) // set 'namespace' parameter for namespaced Prometheus requests (e.g. for Thanos Querier) - if s.metadata.namespace != "" { - url = fmt.Sprintf("%s&namespace=%s", url, s.metadata.namespace) + if s.metadata.Namespace != "" { + url = fmt.Sprintf("%s&namespace=%s", url, s.metadata.Namespace) } - for queryParameterKey, queryParameterValue := range s.metadata.queryParameters { + for queryParameterKey, queryParameterValue := range s.metadata.QueryParameters { queryParameterKeyEscaped := url_pkg.QueryEscape(queryParameterKey) queryParameterValueEscaped := url_pkg.QueryEscape(queryParameterValue) url = fmt.Sprintf("%s&%s=%s", url, queryParameterKeyEscaped, queryParameterValueEscaped) @@ -303,7 +221,7 @@ func (s *prometheusScaler) ExecutePromQuery(ctx context.Context) (float64, error return -1, err } - for headerName, headerValue := range s.metadata.customHeaders { + for headerName, headerValue := range s.metadata.CustomHeaders { req.Header.Add(headerName, headerValue) } @@ -345,22 +263,22 @@ func (s *prometheusScaler) ExecutePromQuery(ctx context.Context) (float64, error // allow for zero element or single element result sets if len(result.Data.Result) == 0 { - if s.metadata.ignoreNullValues { + if s.metadata.IgnoreNullValues { return 0, nil } return -1, fmt.Errorf("prometheus metrics 'prometheus' target may be lost, the result is empty") } else if len(result.Data.Result) > 1 { - return -1, fmt.Errorf("prometheus query %s returned multiple elements", s.metadata.query) + return -1, fmt.Errorf("prometheus query %s returned multiple elements", s.metadata.Query) } valueLen := len(result.Data.Result[0].Value) if valueLen == 0 { - if s.metadata.ignoreNullValues { + if s.metadata.IgnoreNullValues { return 0, nil } return -1, fmt.Errorf("prometheus metrics 'prometheus' target may be lost, the value list is empty") } else if valueLen < 2 { - return -1, fmt.Errorf("prometheus query %s didn't return enough values", s.metadata.query) + return -1, fmt.Errorf("prometheus query %s didn't return enough values", s.metadata.Query) } val := result.Data.Result[0].Value[1] @@ -374,7 +292,7 @@ func (s *prometheusScaler) ExecutePromQuery(ctx context.Context) (float64, error } if math.IsInf(v, 0) { - if s.metadata.ignoreNullValues { + if s.metadata.IgnoreNullValues { return 0, nil } err := fmt.Errorf("promtheus query returns %f", v) @@ -394,5 
+312,5 @@ func (s *prometheusScaler) GetMetricsAndActivity(ctx context.Context, metricName metric := GenerateMetricInMili(metricName, val) - return []external_metrics.ExternalMetricValue{metric}, val > s.metadata.activationThreshold, nil + return []external_metrics.ExternalMetricValue{metric}, val > s.metadata.ActivationThreshold, nil } diff --git a/pkg/scalers/prometheus_scaler_test.go b/pkg/scalers/prometheus_scaler_test.go index 844920f6d12..04dd524551e 100644 --- a/pkg/scalers/prometheus_scaler_test.go +++ b/pkg/scalers/prometheus_scaler_test.go @@ -317,9 +317,9 @@ func TestPrometheusScalerExecutePromQuery(t *testing.T) { scaler := prometheusScaler{ metadata: &prometheusMetadata{ - serverAddress: server.URL, - ignoreNullValues: testData.ignoreNullValues, - unsafeSsl: testData.unsafeSsl, + ServerAddress: server.URL, + IgnoreNullValues: testData.ignoreNullValues, + UnsafeSSL: testData.unsafeSsl, }, httpClient: http.DefaultClient, logger: logr.Discard(), @@ -366,9 +366,9 @@ func TestPrometheusScalerCustomHeaders(t *testing.T) { scaler := prometheusScaler{ metadata: &prometheusMetadata{ - serverAddress: server.URL, - customHeaders: customHeadersValue, - ignoreNullValues: testData.ignoreNullValues, + ServerAddress: server.URL, + CustomHeaders: customHeadersValue, + IgnoreNullValues: testData.ignoreNullValues, }, httpClient: http.DefaultClient, } @@ -410,9 +410,9 @@ func TestPrometheusScalerExecutePromQueryParameters(t *testing.T) { })) scaler := prometheusScaler{ metadata: &prometheusMetadata{ - serverAddress: server.URL, - queryParameters: queryParametersValue, - ignoreNullValues: testData.ignoreNullValues, + ServerAddress: server.URL, + QueryParameters: queryParametersValue, + IgnoreNullValues: testData.ignoreNullValues, }, httpClient: http.DefaultClient, } From 43b6c10c56a105492d38b88bd1362b77340a0a8e Mon Sep 17 00:00:00 2001 From: Jan Wozniak Date: Wed, 8 May 2024 12:26:02 +0200 Subject: [PATCH 3/5] reimplement prometheus auth parsing Signed-off-by: Jan Wozniak --- .../authentication/authentication_types.go | 128 ++++++++++++++++++ pkg/scalers/prometheus_scaler.go | 63 ++++----- pkg/scalers/prometheus_scaler_test.go | 10 +- 3 files changed, 163 insertions(+), 38 deletions(-) diff --git a/pkg/scalers/authentication/authentication_types.go b/pkg/scalers/authentication/authentication_types.go index 09894572775..be272fc3290 100644 --- a/pkg/scalers/authentication/authentication_types.go +++ b/pkg/scalers/authentication/authentication_types.go @@ -1,6 +1,7 @@ package authentication import ( + "fmt" "net/url" "time" ) @@ -31,6 +32,8 @@ const ( FastHTTP // FastHTTP Fast http client. 
) +// AuthMeta is the metadata for the authentication types +// Deprecated: use Config instead type AuthMeta struct { // bearer auth EnableBearerAuth bool @@ -61,6 +64,131 @@ type AuthMeta struct { CustomAuthValue string } +// BasicAuth is a basic authentication type +type BasicAuth struct { + Username string `keda:"name=username, parsingOrder=authParams"` + Password string `keda:"name=password, parsingOrder=authParams"` +} + +// CertAuth is a client certificate authentication type +type CertAuth struct { + Cert string `keda:"name=cert, parsingOrder=authParams"` + Key string `keda:"name=key, parsingOrder=authParams"` + CA string `keda:"name=ca, parsingOrder=authParams"` +} + +// OAuth is an oAuth2 authentication type +type OAuth struct { + OauthTokenURI string `keda:"name=oauthTokenURI, parsingOrder=authParams"` + Scopes []string `keda:"name=scopes, parsingOrder=authParams"` + ClientID string `keda:"name=clientID, parsingOrder=authParams"` + ClientSecret string `keda:"name=clientSecret, parsingOrder=authParams"` + EndpointParams url.Values `keda:"name=endpointParams, parsingOrder=authParams"` +} + +// CustomAuth is a custom header authentication type +type CustomAuth struct { + CustomAuthHeader string `keda:"name=customAuthHeader, parsingOrder=authParams"` + CustomAuthValue string `keda:"name=customAuthValue, parsingOrder=authParams"` +} + +// Config is the configuration for the authentication types +type Config struct { + Modes []Type `keda:"name=authModes, parsingOrder=triggerMetadata, enum=apiKey;basic;tls;bearer;custom;oauth, exclusive=bearer;basic;oauth, optional"` + + BearerToken string `keda:"name=bearerToken, parsingOrder=authParams, optional"` + BasicAuth `keda:"optional"` + CertAuth `keda:"optional"` + OAuth `keda:"optional"` + CustomAuth `keda:"optional"` +} + +// Disabled returns true if no auth modes are enabled +func (c *Config) Disabled() bool { + return c == nil || len(c.Modes) == 0 +} + +// Enabled returns true if given auth mode is enabled +func (c *Config) Enabled(mode Type) bool { + for _, m := range c.Modes { + if m == mode { + return true + } + } + return false +} + +// helpers for checking enabled auth modes +func (c *Config) EnabledTLS() bool { return c.Enabled(TLSAuthType) } +func (c *Config) EnabledBasicAuth() bool { return c.Enabled(BasicAuthType) } +func (c *Config) EnabledBearerAuth() bool { return c.Enabled(BearerAuthType) } +func (c *Config) EnabledOAuth() bool { return c.Enabled(OAuthType) } +func (c *Config) EnabledCustomAuth() bool { return c.Enabled(CustomAuthType) } + +// GetBearerToken returns the bearer token with the Bearer prefix +func (c *Config) GetBearerToken() string { + return fmt.Sprintf("Bearer %s", c.BearerToken) +} + +// Validate validates the Config and returns an error if it is invalid +func (c *Config) Validate() error { + if c.Disabled() { + return nil + } + if c.EnabledBearerAuth() && c.BearerToken == "" { + return fmt.Errorf("bearer token is required when bearer auth is enabled") + } + if c.EnabledBasicAuth() && c.Username == "" { + return fmt.Errorf("username is required when basic auth is enabled") + } + if c.EnabledTLS() && (c.Cert == "" || c.Key == "") { + return fmt.Errorf("cert and key are required when tls auth is enabled") + } + if c.EnabledOAuth() && (c.OauthTokenURI == "" || c.ClientID == "" || c.ClientSecret == "") { + return fmt.Errorf("oauthTokenURI, clientID and clientSecret are required when oauth is enabled") + } + if c.EnabledCustomAuth() && (c.CustomAuthHeader == "" || c.CustomAuthValue == "") { + return 
fmt.Errorf("customAuthHeader and customAuthValue are required when custom auth is enabled") + } + return nil +} + +// ToAuthMeta converts the Config to deprecated AuthMeta +func (c *Config) ToAuthMeta() *AuthMeta { + if c.Disabled() { + return nil + } + return &AuthMeta{ + // bearer auth + EnableBearerAuth: c.EnabledBearerAuth(), + BearerToken: c.BearerToken, + + // basic auth + EnableBasicAuth: c.EnabledBasicAuth(), + Username: c.Username, + Password: c.Password, + + // client certification + EnableTLS: c.EnabledTLS(), + Cert: c.Cert, + Key: c.Key, + CA: c.CA, + + // oAuth2 + EnableOAuth: c.EnabledOAuth(), + OauthTokenURI: c.OauthTokenURI, + Scopes: c.Scopes, + ClientID: c.ClientID, + ClientSecret: c.ClientSecret, + EndpointParams: c.EndpointParams, + + // custom auth header + EnableCustomAuth: c.EnabledCustomAuth(), + CustomAuthHeader: c.CustomAuthHeader, + CustomAuthValue: c.CustomAuthValue, + } +} + type HTTPTransport struct { MaxIdleConnDuration time.Duration ReadTimeout time.Duration diff --git a/pkg/scalers/prometheus_scaler.go b/pkg/scalers/prometheus_scaler.go index 9c7bddb6702..2bd519e2118 100644 --- a/pkg/scalers/prometheus_scaler.go +++ b/pkg/scalers/prometheus_scaler.go @@ -50,19 +50,21 @@ type prometheusScaler struct { // change to false/f if can not accept prometheus return null values // https://github.com/kedacore/keda/issues/3065 type prometheusMetadata struct { - prometheusAuth *authentication.AuthMeta - triggerIndex int - - ServerAddress string `keda:"name=serverAddress, parsingOrder=triggerMetadata"` - Query string `keda:"name=query, parsingOrder=triggerMetadata"` - QueryParameters map[string]string `keda:"name=queryParameters, parsingOrder=triggerMetadata, optional"` - Threshold float64 `keda:"name=threshold, parsingOrder=triggerMetadata"` - ActivationThreshold float64 `keda:"name=activationThreshold, parsingOrder=triggerMetadata, optional"` - Namespace string `keda:"name=namespace, parsingOrder=triggerMetadata, optional"` - CustomHeaders map[string]string `keda:"name=customHeaders, parsingOrder=triggerMetadata, optional"` - IgnoreNullValues bool `keda:"name=ignoreNullValues, parsingOrder=triggerMetadata, optional, default=true"` - UnsafeSSL bool `keda:"name=unsafeSsl, parsingOrder=triggerMetadata, optional"` - CortexOrgID string `keda:"name=cortexOrgID, parsingOrder=triggerMetadata, optional, deprecated=use customHeaders instead"` + triggerIndex int + + PrometheusAuth *authentication.Config `keda:"optional"` + ServerAddress string `keda:"name=serverAddress, parsingOrder=triggerMetadata"` + Query string `keda:"name=query, parsingOrder=triggerMetadata"` + QueryParameters map[string]string `keda:"name=queryParameters, parsingOrder=triggerMetadata, optional"` + Threshold float64 `keda:"name=threshold, parsingOrder=triggerMetadata"` + ActivationThreshold float64 `keda:"name=activationThreshold, parsingOrder=triggerMetadata, optional"` + Namespace string `keda:"name=namespace, parsingOrder=triggerMetadata, optional"` + CustomHeaders map[string]string `keda:"name=customHeaders, parsingOrder=triggerMetadata, optional"` + IgnoreNullValues bool `keda:"name=ignoreNullValues, parsingOrder=triggerMetadata, optional, default=true"` + UnsafeSSL bool `keda:"name=unsafeSsl, parsingOrder=triggerMetadata, optional"` + + // deprecated + CortexOrgID string `keda:"name=cortexOrgID, parsingOrder=triggerMetadata, optional, deprecated=use customHeaders instead"` } type promQueryResult struct { @@ -93,12 +95,12 @@ func NewPrometheusScaler(config *scalersconfig.ScalerConfig) (Scaler, error) { 
httpClient := kedautil.CreateHTTPClient(config.GlobalHTTPTimeout, meta.UnsafeSSL) - if meta.prometheusAuth != nil { - if meta.prometheusAuth.CA != "" || meta.prometheusAuth.EnableTLS { + if !meta.PrometheusAuth.Disabled() { + if meta.PrometheusAuth.CA != "" || meta.PrometheusAuth.EnabledTLS() { // create http.RoundTripper with auth settings from ScalerConfig transport, err := authentication.CreateHTTPRoundTripper( authentication.NetHTTP, - meta.prometheusAuth, + meta.PrometheusAuth.ToAuthMeta(), ) if err != nil { logger.V(1).Error(err, "init Prometheus client http transport") @@ -156,7 +158,7 @@ func parsePrometheusMetadata(config *scalersconfig.ScalerConfig) (meta *promethe } meta.triggerIndex = config.TriggerIndex - err = parseAuthConfig(config, meta) + err = checkAuthConfigWithPodIdentity(config, meta) if err != nil { return nil, err } @@ -164,18 +166,13 @@ func parsePrometheusMetadata(config *scalersconfig.ScalerConfig) (meta *promethe return meta, nil } -func parseAuthConfig(config *scalersconfig.ScalerConfig, meta *prometheusMetadata) error { - // parse auth configs from ScalerConfig - auth, err := authentication.GetAuthConfigs(config.TriggerMetadata, config.AuthParams) - if err != nil { - return err +func checkAuthConfigWithPodIdentity(config *scalersconfig.ScalerConfig, meta *prometheusMetadata) error { + if meta == nil || meta.PrometheusAuth.Disabled() { + return nil } - - if auth != nil && !(config.PodIdentity.Provider == kedav1alpha1.PodIdentityProviderNone || config.PodIdentity.Provider == "") { + if !(config.PodIdentity.Provider == kedav1alpha1.PodIdentityProviderNone || config.PodIdentity.Provider == "") { return fmt.Errorf("pod identity cannot be enabled with other auth types") } - meta.prometheusAuth = auth - return nil } @@ -226,14 +223,14 @@ func (s *prometheusScaler) ExecutePromQuery(ctx context.Context) (float64, error } switch { - case s.metadata.prometheusAuth == nil: + case s.metadata.PrometheusAuth.Disabled(): break - case s.metadata.prometheusAuth.EnableBearerAuth: - req.Header.Set("Authorization", authentication.GetBearerToken(s.metadata.prometheusAuth)) - case s.metadata.prometheusAuth.EnableBasicAuth: - req.SetBasicAuth(s.metadata.prometheusAuth.Username, s.metadata.prometheusAuth.Password) - case s.metadata.prometheusAuth.EnableCustomAuth: - req.Header.Set(s.metadata.prometheusAuth.CustomAuthHeader, s.metadata.prometheusAuth.CustomAuthValue) + case s.metadata.PrometheusAuth.EnabledBearerAuth(): + req.Header.Set("Authorization", s.metadata.PrometheusAuth.GetBearerToken()) + case s.metadata.PrometheusAuth.EnabledBasicAuth(): + req.SetBasicAuth(s.metadata.PrometheusAuth.Username, s.metadata.PrometheusAuth.Password) + case s.metadata.PrometheusAuth.EnabledCustomAuth(): + req.Header.Set(s.metadata.PrometheusAuth.CustomAuthHeader, s.metadata.PrometheusAuth.CustomAuthValue) } r, err := s.httpClient.Do(req) diff --git a/pkg/scalers/prometheus_scaler_test.go b/pkg/scalers/prometheus_scaler_test.go index 04dd524551e..694a6a7c624 100644 --- a/pkg/scalers/prometheus_scaler_test.go +++ b/pkg/scalers/prometheus_scaler_test.go @@ -162,11 +162,11 @@ func TestPrometheusScalerAuthParams(t *testing.T) { } if err == nil { - if meta.prometheusAuth != nil { - if (meta.prometheusAuth.EnableBearerAuth && !strings.Contains(testData.metadata["authModes"], "bearer")) || - (meta.prometheusAuth.EnableBasicAuth && !strings.Contains(testData.metadata["authModes"], "basic")) || - (meta.prometheusAuth.EnableTLS && !strings.Contains(testData.metadata["authModes"], "tls")) || - 
(meta.prometheusAuth.EnableCustomAuth && !strings.Contains(testData.metadata["authModes"], "custom")) { + if !meta.PrometheusAuth.Disabled() { + if (meta.PrometheusAuth.EnabledBearerAuth() && !strings.Contains(testData.metadata["authModes"], "bearer")) || + (meta.PrometheusAuth.EnabledBasicAuth() && !strings.Contains(testData.metadata["authModes"], "basic")) || + (meta.PrometheusAuth.EnabledTLS() && !strings.Contains(testData.metadata["authModes"], "tls")) || + (meta.PrometheusAuth.EnabledCustomAuth() && !strings.Contains(testData.metadata["authModes"], "custom")) { t.Error("wrong auth mode detected") } } From 8b9f5cfc229f762792339ecd3934cdb34a93b260 Mon Sep 17 00:00:00 2001 From: Jan Wozniak Date: Fri, 10 May 2024 08:54:46 +0200 Subject: [PATCH 4/5] wip changelog Signed-off-by: Jan Wozniak --- CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 961820a6343..d3bb3b658b0 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -59,7 +59,7 @@ To learn more about active deprecations, we recommend checking [GitHub Discussio ### New - TODO ([#XXX](https://github.com/kedacore/keda/issues/XXX)) -- **General**: Declarative scaler config parsing ([#5037](https://github.com/kedacore/keda/issues/5037)) +- **General**: Declarative parsing of scaler config ([#5037](https://github.com/kedacore/keda/issues/5037)) #### Experimental From 07676baf9e86e8921b75c469e1444198bccf657a Mon Sep 17 00:00:00 2001 From: Jan Wozniak Date: Fri, 10 May 2024 15:28:28 +0200 Subject: [PATCH 5/5] review items * rename 'parsingOrder' tag to 'order' * rename 'exclusive' tag to 'exclusiveSet' * improve parsing 'order' tag behaviour + additional coverage Signed-off-by: Jan Wozniak --- .../authentication/authentication_types.go | 28 ++--- pkg/scalers/prometheus_scaler.go | 22 ++-- pkg/scalers/scalersconfig/typed_config.go | 59 ++++++---- .../scalersconfig/typed_config_test.go | 103 ++++++++++++------ 4 files changed, 134 insertions(+), 78 deletions(-) diff --git a/pkg/scalers/authentication/authentication_types.go b/pkg/scalers/authentication/authentication_types.go index be272fc3290..908010e2483 100644 --- a/pkg/scalers/authentication/authentication_types.go +++ b/pkg/scalers/authentication/authentication_types.go @@ -66,37 +66,37 @@ type AuthMeta struct { // BasicAuth is a basic authentication type type BasicAuth struct { - Username string `keda:"name=username, parsingOrder=authParams"` - Password string `keda:"name=password, parsingOrder=authParams"` + Username string `keda:"name=username, order=authParams"` + Password string `keda:"name=password, order=authParams"` } // CertAuth is a client certificate authentication type type CertAuth struct { - Cert string `keda:"name=cert, parsingOrder=authParams"` - Key string `keda:"name=key, parsingOrder=authParams"` - CA string `keda:"name=ca, parsingOrder=authParams"` + Cert string `keda:"name=cert, order=authParams"` + Key string `keda:"name=key, order=authParams"` + CA string `keda:"name=ca, order=authParams"` } // OAuth is an oAuth2 authentication type type OAuth struct { - OauthTokenURI string `keda:"name=oauthTokenURI, parsingOrder=authParams"` - Scopes []string `keda:"name=scopes, parsingOrder=authParams"` - ClientID string `keda:"name=clientID, parsingOrder=authParams"` - ClientSecret string `keda:"name=clientSecret, parsingOrder=authParams"` - EndpointParams url.Values `keda:"name=endpointParams, parsingOrder=authParams"` + OauthTokenURI string `keda:"name=oauthTokenURI, order=authParams"` + Scopes []string 
`keda:"name=scopes, order=authParams"` + ClientID string `keda:"name=clientID, order=authParams"` + ClientSecret string `keda:"name=clientSecret, order=authParams"` + EndpointParams url.Values `keda:"name=endpointParams, order=authParams"` } // CustomAuth is a custom header authentication type type CustomAuth struct { - CustomAuthHeader string `keda:"name=customAuthHeader, parsingOrder=authParams"` - CustomAuthValue string `keda:"name=customAuthValue, parsingOrder=authParams"` + CustomAuthHeader string `keda:"name=customAuthHeader, order=authParams"` + CustomAuthValue string `keda:"name=customAuthValue, order=authParams"` } // Config is the configuration for the authentication types type Config struct { - Modes []Type `keda:"name=authModes, parsingOrder=triggerMetadata, enum=apiKey;basic;tls;bearer;custom;oauth, exclusive=bearer;basic;oauth, optional"` + Modes []Type `keda:"name=authModes, order=triggerMetadata, enum=apiKey;basic;tls;bearer;custom;oauth, exclusiveSet=bearer;basic;oauth, optional"` - BearerToken string `keda:"name=bearerToken, parsingOrder=authParams, optional"` + BearerToken string `keda:"name=bearerToken, order=authParams, optional"` BasicAuth `keda:"optional"` CertAuth `keda:"optional"` OAuth `keda:"optional"` diff --git a/pkg/scalers/prometheus_scaler.go b/pkg/scalers/prometheus_scaler.go index 2bd519e2118..99fc6f9f5ea 100644 --- a/pkg/scalers/prometheus_scaler.go +++ b/pkg/scalers/prometheus_scaler.go @@ -45,7 +45,7 @@ type prometheusScaler struct { logger logr.Logger } -// sometimes should consider there is an error we can accept +// IgnoreNullValues - sometimes should consider there is an error we can accept // default value is true/t, to ignore the null value return from prometheus // change to false/f if can not accept prometheus return null values // https://github.com/kedacore/keda/issues/3065 @@ -53,18 +53,18 @@ type prometheusMetadata struct { triggerIndex int PrometheusAuth *authentication.Config `keda:"optional"` - ServerAddress string `keda:"name=serverAddress, parsingOrder=triggerMetadata"` - Query string `keda:"name=query, parsingOrder=triggerMetadata"` - QueryParameters map[string]string `keda:"name=queryParameters, parsingOrder=triggerMetadata, optional"` - Threshold float64 `keda:"name=threshold, parsingOrder=triggerMetadata"` - ActivationThreshold float64 `keda:"name=activationThreshold, parsingOrder=triggerMetadata, optional"` - Namespace string `keda:"name=namespace, parsingOrder=triggerMetadata, optional"` - CustomHeaders map[string]string `keda:"name=customHeaders, parsingOrder=triggerMetadata, optional"` - IgnoreNullValues bool `keda:"name=ignoreNullValues, parsingOrder=triggerMetadata, optional, default=true"` - UnsafeSSL bool `keda:"name=unsafeSsl, parsingOrder=triggerMetadata, optional"` + ServerAddress string `keda:"name=serverAddress, order=triggerMetadata"` + Query string `keda:"name=query, order=triggerMetadata"` + QueryParameters map[string]string `keda:"name=queryParameters, order=triggerMetadata, optional"` + Threshold float64 `keda:"name=threshold, order=triggerMetadata"` + ActivationThreshold float64 `keda:"name=activationThreshold, order=triggerMetadata, optional"` + Namespace string `keda:"name=namespace, order=triggerMetadata, optional"` + CustomHeaders map[string]string `keda:"name=customHeaders, order=triggerMetadata, optional"` + IgnoreNullValues bool `keda:"name=ignoreNullValues, order=triggerMetadata, optional, default=true"` + UnsafeSSL bool `keda:"name=unsafeSsl, order=triggerMetadata, optional"` // deprecated - CortexOrgID 
string `keda:"name=cortexOrgID, parsingOrder=triggerMetadata, optional, deprecated=use customHeaders instead"` + CortexOrgID string `keda:"name=cortexOrgID, order=triggerMetadata, optional, deprecated=use customHeaders instead"` } type promQueryResult struct { diff --git a/pkg/scalers/scalersconfig/typed_config.go b/pkg/scalers/scalersconfig/typed_config.go index 23694edcdcb..a47f136f855 100644 --- a/pkg/scalers/scalersconfig/typed_config.go +++ b/pkg/scalers/scalersconfig/typed_config.go @@ -25,6 +25,9 @@ import ( "runtime/debug" "strconv" "strings" + + "golang.org/x/exp/maps" + "golang.org/x/exp/slices" ) // CustomValidator is an interface that can be implemented to validate the configuration of the typed config @@ -42,8 +45,15 @@ const ( AuthParams ParsingOrder = "authParams" ) +// allowedParsingOrderMap is a map with set of valid parsing orders +var allowedParsingOrderMap = map[ParsingOrder]bool{ + TriggerMetadata: true, + ResolvedEnv: true, + AuthParams: true, +} + // separators for field tag structure -// e.g. name=stringVal,parsingOrder=triggerMetadata;resolvedEnv;authParams,optional +// e.g. name=stringVal,order=triggerMetadata;resolvedEnv;authParams,optional const ( tagSeparator = "," tagKeySeparator = "=" @@ -61,10 +71,10 @@ const ( optionalTag = "optional" deprecatedTag = "deprecated" defaultTag = "default" - parsingOrderTag = "parsingOrder" + orderTag = "order" nameTag = "name" enumTag = "enum" - exclusiveTag = "exclusive" + exclusiveSetTag = "exclusiveSet" ) // Params is a struct that represents the parameter list that can be used in the keda tag @@ -78,9 +88,9 @@ type Params struct { // Optional is the 'optional' tag parameter defining if the parameter is optional Optional bool - // ParsingOrder is the 'parsingOrder' tag parameter defining the order in which the parameter is looked up + // Order is the 'order' tag parameter defining the parsing order in which the parameter is looked up // in the triggerMetadata, resolvedEnv or authParams maps - ParsingOrder []ParsingOrder + Order []ParsingOrder // Default is the 'default' tag parameter defining the default value of the parameter if it's not found // in any of the maps from ParsingOrder @@ -93,8 +103,8 @@ type Params struct { // Enum is the 'enum' tag parameter defining the list of possible values for the parameter Enum []string - // Exclusive is the 'exclusive' tag parameter defining the list of values that are mutually exclusive - Exclusive []string + // ExclusiveSet is the 'exclusiveSet' tag parameter defining the list of values that are mutually exclusive + ExclusiveSet []string } // IsNested is a function that returns true if the parameter is nested @@ -181,7 +191,12 @@ func (sc *ScalerConfig) setValue(field reflect.Value, params Params) error { return nil } if !exists && !(params.Optional || params.IsDeprecated()) { - return fmt.Errorf("missing required parameter %q in %v", params.Name, params.ParsingOrder) + if len(params.Order) == 0 { + apo := maps.Keys(allowedParsingOrderMap) + slices.Sort(apo) + return fmt.Errorf("missing required parameter %q, no 'order' tag, provide any from %v", params.Name, apo) + } + return fmt.Errorf("missing required parameter %q in %v", params.Name, params.Order) } if params.Enum != nil { enumMap := make(map[string]bool) @@ -200,9 +215,9 @@ func (sc *ScalerConfig) setValue(field reflect.Value, params Params) error { return fmt.Errorf("parameter %q value %q must be one of %v", params.Name, valFromConfig, params.Enum) } } - if params.Exclusive != nil { + if params.ExclusiveSet != nil { 
exclusiveMap := make(map[string]bool) - for _, e := range params.Exclusive { + for _, e := range params.ExclusiveSet { exclusiveMap[e] = true } split := strings.Split(valFromConfig, elemSeparator) @@ -214,7 +229,7 @@ func (sc *ScalerConfig) setValue(field reflect.Value, params Params) error { } } if exclusiveCount > 1 { - return fmt.Errorf("parameter %q value %q must contain only one of %v", params.Name, valFromConfig, params.Exclusive) + return fmt.Errorf("parameter %q value %q must contain only one of %v", params.Name, valFromConfig, params.ExclusiveSet) } } if params.IsNested() { @@ -326,7 +341,7 @@ func setConfigValueHelper(valFromConfig string, field reflect.Value) error { // configParamValue is a function that returns the value of the parameter based on the parsing order func (sc *ScalerConfig) configParamValue(params Params) (string, bool) { - for _, po := range params.ParsingOrder { + for _, po := range params.Order { var m map[string]string key := params.Name switch po { @@ -338,7 +353,8 @@ func (sc *ScalerConfig) configParamValue(params Params) (string, bool) { m = sc.ResolvedEnv key = sc.TriggerMetadata[fmt.Sprintf("%sFromEnv", params.Name)] default: - m = sc.TriggerMetadata + // this is checked when parsing the tags but adding as default case to avoid any potential future problems + return "", false } if param, ok := m[key]; ok && param != "" { return strings.TrimSpace(param), true @@ -362,12 +378,17 @@ func paramsFromTag(tag string, field reflect.StructField) (Params, error) { if len(tsplit) > 1 { params.Optional, _ = strconv.ParseBool(strings.TrimSpace(tsplit[1])) } - case parsingOrderTag: + case orderTag: if len(tsplit) > 1 { - parsingOrder := strings.Split(tsplit[1], tagValueSeparator) - for _, po := range parsingOrder { + order := strings.Split(tsplit[1], tagValueSeparator) + for _, po := range order { poTyped := ParsingOrder(strings.TrimSpace(po)) - params.ParsingOrder = append(params.ParsingOrder, poTyped) + if !allowedParsingOrderMap[poTyped] { + apo := maps.Keys(allowedParsingOrderMap) + slices.Sort(apo) + return params, fmt.Errorf("unknown parsing order value %s, has to be one of %s", po, apo) + } + params.Order = append(params.Order, poTyped) } } case nameTag: @@ -388,9 +409,9 @@ func paramsFromTag(tag string, field reflect.StructField) (Params, error) { if len(tsplit) > 1 { params.Enum = strings.Split(tsplit[1], tagValueSeparator) } - case exclusiveTag: + case exclusiveSetTag: if len(tsplit) > 1 { - params.Exclusive = strings.Split(tsplit[1], tagValueSeparator) + params.ExclusiveSet = strings.Split(tsplit[1], tagValueSeparator) } case "": continue diff --git a/pkg/scalers/scalersconfig/typed_config_test.go b/pkg/scalers/scalersconfig/typed_config_test.go index c9f1e4f75e1..8da2a5b9954 100644 --- a/pkg/scalers/scalersconfig/typed_config_test.go +++ b/pkg/scalers/scalersconfig/typed_config_test.go @@ -44,11 +44,11 @@ func TestBasicTypedConfig(t *testing.T) { } type testStruct struct { - StringVal string `keda:"name=stringVal, parsingOrder=triggerMetadata"` - IntVal int `keda:"name=intVal, parsingOrder=triggerMetadata"` - BoolVal bool `keda:"name=boolVal, parsingOrder=resolvedEnv"` - FloatVal float64 `keda:"name=floatVal, parsingOrder=resolvedEnv"` - AuthVal string `keda:"name=auth, parsingOrder=authParams"` + StringVal string `keda:"name=stringVal, order=triggerMetadata"` + IntVal int `keda:"name=intVal, order=triggerMetadata"` + BoolVal bool `keda:"name=boolVal, order=resolvedEnv"` + FloatVal float64 `keda:"name=floatVal, order=resolvedEnv"` + AuthVal string 
`keda:"name=auth, order=authParams"` } ts := testStruct{} @@ -82,9 +82,9 @@ func TestParsingOrder(t *testing.T) { } type testStruct struct { - StringVal string `keda:"name=stringVal, parsingOrder=resolvedEnv;triggerMetadata"` - IntVal int `keda:"name=intVal, parsingOrder=triggerMetadata;resolvedEnv"` - FloatVal float64 `keda:"name=floatVal, parsingOrder=resolvedEnv;triggerMetadata"` + StringVal string `keda:"name=stringVal, order=resolvedEnv;triggerMetadata"` + IntVal int `keda:"name=intVal, order=triggerMetadata;resolvedEnv"` + FloatVal float64 `keda:"name=floatVal, order=resolvedEnv;triggerMetadata"` } ts := testStruct{} @@ -107,9 +107,9 @@ func TestOptional(t *testing.T) { } type testStruct struct { - StringVal string `keda:"name=stringVal, parsingOrder=triggerMetadata"` - IntValOptional int `keda:"name=intVal, parsingOrder=triggerMetadata, optional"` - IntValAlsoOptional int `keda:"name=intVal, parsingOrder=triggerMetadata, optional=true"` + StringVal string `keda:"name=stringVal, order=triggerMetadata"` + IntValOptional int `keda:"name=intVal, order=triggerMetadata, optional"` + IntValAlsoOptional int `keda:"name=intVal, order=triggerMetadata, optional=true"` } ts := testStruct{} @@ -128,7 +128,7 @@ func TestMissing(t *testing.T) { sc := &ScalerConfig{} type testStruct struct { - StringVal string `keda:"name=stringVal, parsingOrder=triggerMetadata"` + StringVal string `keda:"name=stringVal, order=triggerMetadata"` } ts := testStruct{} @@ -147,7 +147,7 @@ func TestDeprecated(t *testing.T) { } type testStruct struct { - StringVal string `keda:"name=stringVal, parsingOrder=triggerMetadata, deprecated=deprecated"` + StringVal string `keda:"name=stringVal, order=triggerMetadata, deprecated=deprecated"` } ts := testStruct{} @@ -174,9 +174,9 @@ func TestDefaultValue(t *testing.T) { } type testStruct struct { - BoolVal bool `keda:"name=boolVal, parsingOrder=triggerMetadata, optional, default=true"` - StringVal string `keda:"name=stringVal, parsingOrder=triggerMetadata, optional, default=d"` - StringVal2 string `keda:"name=stringVal2, parsingOrder=triggerMetadata, optional, default=d"` + BoolVal bool `keda:"name=boolVal, order=triggerMetadata, optional, default=true"` + StringVal string `keda:"name=stringVal, order=triggerMetadata, optional, default=d"` + StringVal2 string `keda:"name=stringVal2, order=triggerMetadata, optional, default=d"` } ts := testStruct{} @@ -199,7 +199,7 @@ func TestMap(t *testing.T) { } type testStruct struct { - MapVal map[string]int `keda:"name=mapVal, parsingOrder=triggerMetadata"` + MapVal map[string]int `keda:"name=mapVal, order=triggerMetadata"` } ts := testStruct{} @@ -222,8 +222,8 @@ func TestSlice(t *testing.T) { } type testStruct struct { - SliceVal []int `keda:"name=sliceVal, parsingOrder=triggerMetadata"` - SliceValWithSpaces []int `keda:"name=sliceValWithSpaces, parsingOrder=triggerMetadata"` + SliceVal []int `keda:"name=sliceVal, order=triggerMetadata"` + SliceValWithSpaces []int `keda:"name=sliceValWithSpaces, order=triggerMetadata"` } ts := testStruct{} @@ -251,8 +251,8 @@ func TestEnum(t *testing.T) { } type testStruct struct { - EnumVal string `keda:"name=enumVal, parsingOrder=triggerMetadata, enum=value1;value2"` - EnumSlice []string `keda:"name=enumSlice, parsingOrder=triggerMetadata, enum=value1;value2, optional"` + EnumVal string `keda:"name=enumVal, order=triggerMetadata, enum=value1;value2"` + EnumSlice []string `keda:"name=enumSlice, order=triggerMetadata, enum=value1;value2, optional"` } ts := testStruct{} @@ -273,12 +273,12 @@ func TestEnum(t 
*testing.T) { Expect(err).To(MatchError(`parameter "enumVal" value "value3" must be one of [value1 value2]`)) } -// TestExclusive tests the exclusive type +// TestExclusive tests the exclusiveSet type func TestExclusive(t *testing.T) { RegisterTestingT(t) type testStruct struct { - IntVal []int `keda:"name=intVal, parsingOrder=triggerMetadata, exclusive=1;4;5"` + IntVal []int `keda:"name=intVal, order=triggerMetadata, exclusiveSet=1;4;5"` } sc := &ScalerConfig{ @@ -313,7 +313,7 @@ func TestURLValues(t *testing.T) { } type testStruct struct { - EndpointParams url.Values `keda:"name=endpointParams, parsingOrder=authParams"` + EndpointParams url.Values `keda:"name=endpointParams, order=authParams"` } ts := testStruct{} @@ -338,7 +338,7 @@ func TestGenericMap(t *testing.T) { // structurally similar to url.Values but should behave as generic map type testStruct struct { - EndpointParams map[string][]string `keda:"name=endpointParams, parsingOrder=authParams"` + EndpointParams map[string][]string `keda:"name=endpointParams, order=authParams"` } ts := testStruct{} @@ -365,8 +365,8 @@ func TestNestedStruct(t *testing.T) { } type basicAuth struct { - Username string `keda:"name=username, parsingOrder=authParams"` - Password string `keda:"name=password, parsingOrder=authParams"` + Username string `keda:"name=username, order=authParams"` + Password string `keda:"name=password, order=authParams"` } type testStruct struct { @@ -393,8 +393,8 @@ func TestEmbeddedStruct(t *testing.T) { type testStruct struct { BasicAuth struct { - Username string `keda:"name=username, parsingOrder=authParams"` - Password string `keda:"name=password, parsingOrder=authParams"` + Username string `keda:"name=username, order=authParams"` + Password string `keda:"name=password, order=authParams"` } `keda:""` } @@ -436,9 +436,9 @@ func TestNestedOptional(t *testing.T) { } type basicAuth struct { - Username string `keda:"name=username, parsingOrder=authParams"` - Password string `keda:"name=password, parsingOrder=authParams, optional"` - AlsoOptionalThanksToParent string `keda:"name=optional, parsingOrder=authParams"` + Username string `keda:"name=username, order=authParams"` + Password string `keda:"name=password, order=authParams, optional"` + AlsoOptionalThanksToParent string `keda:"name=optional, order=authParams"` } type testStruct struct { @@ -465,8 +465,8 @@ func TestNestedPointer(t *testing.T) { } type basicAuth struct { - Username string `keda:"name=username, parsingOrder=authParams"` - Password string `keda:"name=password, parsingOrder=authParams"` + Username string `keda:"name=username, order=authParams"` + Password string `keda:"name=password, order=authParams"` } type testStruct struct { @@ -480,3 +480,38 @@ func TestNestedPointer(t *testing.T) { Expect(ts.BA.Username).To(Equal("user")) Expect(ts.BA.Password).To(Equal("pass")) } + +// TestNoParsingOrder tests when no parsing order is provided +func TestNoParsingOrder(t *testing.T) { + RegisterTestingT(t) + + sc := &ScalerConfig{ + TriggerMetadata: map[string]string{ + "strVal": "value1", + "defaultVal": "value2", + }, + } + + type testStructMissing struct { + StrVal string `keda:"name=strVal, enum=value1;value2"` + } + tsm := testStructMissing{} + err := sc.TypedConfig(&tsm) + Expect(err).To(MatchError(`missing required parameter "strVal", no 'order' tag, provide any from [authParams resolvedEnv triggerMetadata]`)) + + type testStructDefault struct { + DefaultVal string `keda:"name=defaultVal, default=dv"` + } + tsd := testStructDefault{} + err = sc.TypedConfig(&tsd) 
+ Expect(err).To(BeNil()) + Expect(tsd.DefaultVal).To(Equal("dv")) + + type testStructDefaultMissing struct { + DefaultVal2 string `keda:"name=defaultVal2, default=dv"` + } + tsdm := testStructDefaultMissing{} + err = sc.TypedConfig(&tsdm) + Expect(err).To(BeNil()) + Expect(tsdm.DefaultVal2).To(Equal("dv")) +}
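
For reviewers who want to see the renamed tags end to end, below is a minimal, self-contained sketch of how a scaler could declare its metadata with the `order` and `exclusiveSet` tag keys and parse it via `ScalerConfig.TypedConfig`. The `exampleMetadata` struct, its field names, and `parseExampleMetadata` are hypothetical and exist only for illustration (they are not part of this patch); the tag keys (`name`, `order`, `optional`, `default`, `enum`, `exclusiveSet`) and the `TypedConfig` call are taken from the changes above, and the import path assumes the usual github.com/kedacore/keda/v2 module layout.

package scalers

import (
	"fmt"

	"github.com/kedacore/keda/v2/pkg/scalers/scalersconfig"
)

// exampleMetadata is a hypothetical scaler configuration used only to
// illustrate the declarative tags as renamed in this patch series.
type exampleMetadata struct {
	// required, looked up in triggerMetadata only
	Endpoint string `keda:"name=endpoint, order=triggerMetadata"`
	// optional, looked up in authParams first, then resolvedEnv
	APIKey string `keda:"name=apiKey, order=authParams;resolvedEnv, optional"`
	// optional, defaults to "query", restricted to the listed values
	Mode string `keda:"name=mode, order=triggerMetadata, optional, default=query, enum=query;subscription"`
	// optional, at most one of the listed values may appear in the comma-separated list
	AuthModes []string `keda:"name=authModes, order=triggerMetadata, optional, exclusiveSet=basic;bearer"`
}

func parseExampleMetadata(sc *scalersconfig.ScalerConfig) (*exampleMetadata, error) {
	meta := &exampleMetadata{}
	if err := sc.TypedConfig(meta); err != nil {
		return nil, fmt.Errorf("error parsing example metadata: %w", err)
	}
	return meta, nil
}

With this sketch, a missing "endpoint" key yields the `missing required parameter ... in [triggerMetadata]` error, an unlisted "mode" value fails the enum check, and supplying both "basic" and "bearer" in "authModes" fails the exclusiveSet check, mirroring the behavior exercised by the tests in typed_config_test.go.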