From 56c01ee510c77c82a27264dbc7ea03a65f8f8db2 Mon Sep 17 00:00:00 2001 From: Nemanja Zeljkovic Date: Fri, 27 Jun 2025 14:05:02 +0200 Subject: [PATCH] Add support for key+ syntax Signed-off-by: Nemanja Zeljkovic --- pkg/environment/environment.go | 3 +- pkg/state/state.go | 139 ++++++--- pkg/state/state_test.go | 4 +- pkg/yaml/append_processor.go | 174 +++++++++++ pkg/yaml/append_processor_test.go | 494 ++++++++++++++++++++++++++++++ pkg/yaml/yaml.go | 73 +++++ 6 files changed, 836 insertions(+), 51 deletions(-) create mode 100644 pkg/yaml/append_processor.go create mode 100644 pkg/yaml/append_processor_test.go diff --git a/pkg/environment/environment.go b/pkg/environment/environment.go index 325d73c9..a1294cc1 100644 --- a/pkg/environment/environment.go +++ b/pkg/environment/environment.go @@ -85,7 +85,8 @@ func (e *Environment) GetMergedValues() (map[string]any, error) { return nil, err } - if err := mergo.Merge(&vals, e.Values, mergo.WithOverride); err != nil { + processor := yaml.NewAppendProcessor() + if err := processor.MergeWithAppend(vals, e.Values); err != nil { return nil, err } diff --git a/pkg/state/state.go b/pkg/state/state.go index 0e2659d9..235fe3dd 100644 --- a/pkg/state/state.go +++ b/pkg/state/state.go @@ -19,7 +19,6 @@ import ( "text/template" "time" - "dario.cat/mergo" "github.com/Masterminds/semver/v3" "github.com/helmfile/chartify" "github.com/helmfile/vals" @@ -1699,11 +1698,12 @@ func (st *HelmState) WriteReleasesValues(helm helmexec.Interface, additionalValu return []error{fmt.Errorf("reading %s: %w", f, err)} } - if err := yaml.Unmarshal(srcBytes, &src); err != nil { + if err := yaml.UnmarshalWithAppend(srcBytes, &src); err != nil { return []error{fmt.Errorf("unmarshalling yaml %s: %w", f, err)} } - if err := mergo.Merge(&merged, &src, mergo.WithOverride); err != nil { + processor := yaml.NewAppendProcessor() + if err := processor.MergeWithAppend(merged, src); err != nil { return []error{fmt.Errorf("merging %s: %w", f, err)} } } @@ -3298,94 +3298,137 @@ func (st *HelmState) getReleaseMissingFileHandler(release *ReleaseSpec) *string } } -func (st *HelmState) generateTemporaryReleaseValuesFiles(release *ReleaseSpec, values []any) ([]string, error) { - generatedFiles := []string{} +func (st *HelmState) generateTemporaryReleaseValuesFiles(release *ReleaseSpec, values []any, missingFileHandler *string) ([]string, error) { + var generatedFiles []string + var mergedRaw = make(map[string]any) for _, value := range values { + var fileValues map[string]any + switch typedValue := value.(type) { case string: paths, skip, err := st.storage().resolveFile(st.getReleaseMissingFileHandler(release), "values", typedValue, st.getReleaseMissingFileHandlerConfig(release).resolveFileOptions()...) if err != nil { - return generatedFiles, err + return nil, err } if skip { continue } if len(paths) > 1 { - return generatedFiles, fmt.Errorf("glob patterns in release values and secrets is not supported yet. please submit a feature request if necessary") + return nil, fmt.Errorf("glob patterns in release values and secrets is not supported yet. 
please submit a feature request if necessary") } path := paths[0] yamlBytes, err := st.RenderReleaseValuesFileToBytes(release, path) if err != nil { - return generatedFiles, fmt.Errorf("failed to render values files \"%s\": %v", typedValue, err) + return nil, fmt.Errorf("failed to render values files \"%s\": %v", typedValue, err) } - valfile, err := createTempValuesFile(release, yamlBytes) - if err != nil { - return generatedFiles, err - } - defer func() { - _ = valfile.Close() - }() - - if _, err := valfile.Write(yamlBytes); err != nil { - return generatedFiles, fmt.Errorf("failed to write %s: %v", valfile.Name(), err) + if err := yaml.Unmarshal(yamlBytes, &fileValues); err != nil { + return nil, fmt.Errorf("failed to unmarshal values file \"%s\": %v", typedValue, err) } - st.logger.Debugf("Successfully generated the value file at %s. produced:\n%s", path, string(yamlBytes)) - - generatedFiles = append(generatedFiles, valfile.Name()) case map[any]any, map[string]any: - valfile, err := createTempValuesFile(release, typedValue) - if err != nil { - return generatedFiles, err - } - defer func() { - _ = valfile.Close() - }() - - encoder := yaml.NewEncoder(valfile) - defer func() { - _ = encoder.Close() - }() - - if err := encoder.Encode(typedValue); err != nil { - return generatedFiles, err + if m, ok := typedValue.(map[string]any); ok { + fileValues = m + } else { + fileValues = make(map[string]any) + for k, v := range typedValue.(map[any]any) { + if strKey, ok := k.(string); ok { + fileValues[strKey] = v + } + } } - generatedFiles = append(generatedFiles, valfile.Name()) default: - return generatedFiles, fmt.Errorf("unexpected type of value: value=%v, type=%T", typedValue, typedValue) + return nil, fmt.Errorf("unexpected type of value: value=%v, type=%T", typedValue, typedValue) + } + + for k, v := range fileValues { + mergedRaw[k] = mergeAppendValues(mergedRaw[k], v, k) } } + + processor := yaml.NewAppendProcessor() + processed, err := processor.ProcessMap(mergedRaw) + if err != nil { + return nil, fmt.Errorf("failed to process key+ syntax: %w", err) + } + + if len(processed) > 0 { + valfile, err := createTempValuesFile(release, processed) + if err != nil { + return nil, err + } + defer func() { + _ = valfile.Close() + }() + + encoder := yaml.NewEncoder(valfile) + defer func() { + _ = encoder.Close() + }() + + if err := encoder.Encode(processed); err != nil { + return nil, err + } + + generatedFiles = append(generatedFiles, valfile.Name()) + } + return generatedFiles, nil } +// mergeAppendValues merges two values for the same key, preserving key+ keys for later processing +func mergeAppendValues(existing, incoming any, key string) any { + if existing == nil { + return incoming + } + if em, ok := existing.(map[string]any); ok { + if im, ok := incoming.(map[string]any); ok { + for k, v := range im { + em[k] = mergeAppendValues(em[k], v, k) + } + return em + } + } + return incoming +} + func (st *HelmState) generateVanillaValuesFiles(release *ReleaseSpec) ([]string, error) { values := []any{} + inlineValues := []any{} + for _, v := range release.Values { switch typedValue := v.(type) { case string: path := st.storage().normalizePath(release.ValuesPathPrefix + typedValue) values = append(values, path) + case map[any]any, map[string]any: + inlineValues = append(inlineValues, v) default: values = append(values, v) } } - valuesMapSecretsRendered, err := st.valsRuntime.Eval(map[string]any{"values": values}) - if err != nil { - return nil, err + var valuesSecretsRendered []any + if len(values) > 0 { 
+ valuesMapSecretsRendered, err := st.valsRuntime.Eval(map[string]any{"values": values}) + if err != nil { + return nil, err + } + + rendered, ok := valuesMapSecretsRendered["values"].([]any) + if !ok { + return nil, fmt.Errorf("Failed to render values in %s for release %s: type %T isn't supported", st.FilePath, release.Name, valuesMapSecretsRendered["values"]) + } + valuesSecretsRendered = rendered } - valuesSecretsRendered, ok := valuesMapSecretsRendered["values"].([]any) - if !ok { - return nil, fmt.Errorf("Failed to render values in %s for release %s: type %T isn't supported", st.FilePath, release.Name, valuesMapSecretsRendered["values"]) - } + allValues := append(valuesSecretsRendered, inlineValues...) - generatedFiles, err := st.generateTemporaryReleaseValuesFiles(release, valuesSecretsRendered) + generatedFiles, err := st.generateTemporaryReleaseValuesFiles(release, allValues, release.MissingFileHandler) if err != nil { return nil, err } @@ -3451,7 +3494,7 @@ func (st *HelmState) generateSecretValuesFiles(helm helmexec.Interface, release generatedDecryptedFiles = append(generatedDecryptedFiles, valfile) } - generatedFiles, err := st.generateTemporaryReleaseValuesFiles(release, generatedDecryptedFiles) + generatedFiles, err := st.generateTemporaryReleaseValuesFiles(release, generatedDecryptedFiles, release.MissingFileHandler) if err != nil { return nil, err } @@ -3965,7 +4008,7 @@ func (st *HelmState) LoadYAMLForEmbedding(release *ReleaseSpec, entries []any, m return nil, fmt.Errorf("failed to render values files \"%s\": %v", t, err) } - if err := yaml.Unmarshal(yamlBytes, &values); err != nil { + if err := yaml.UnmarshalWithAppend(yamlBytes, &values); err != nil { return nil, err } diff --git a/pkg/state/state_test.go b/pkg/state/state_test.go index 4834ba1c..6d55e307 100644 --- a/pkg/state/state_test.go +++ b/pkg/state/state_test.go @@ -1942,7 +1942,7 @@ func TestHelmState_SyncReleasesCleanup(t *testing.T) { }, }, helm: &exectest.Helm{}, - expectedNumRemovedFiles: 2, + expectedNumRemovedFiles: 1, }, } for i := range tests { @@ -2029,7 +2029,7 @@ func TestHelmState_DiffReleasesCleanup(t *testing.T) { }, }, helm: &exectest.Helm{}, - expectedNumRemovedFiles: 2, + expectedNumRemovedFiles: 1, }, } for i := range tests { diff --git a/pkg/yaml/append_processor.go b/pkg/yaml/append_processor.go new file mode 100644 index 00000000..b85184b2 --- /dev/null +++ b/pkg/yaml/append_processor.go @@ -0,0 +1,174 @@ +package yaml + +import ( + "fmt" + "strings" +) + +type AppendProcessor struct{} + +func NewAppendProcessor() *AppendProcessor { + return &AppendProcessor{} +} + +func (ap *AppendProcessor) ProcessMap(data map[string]any) (map[string]any, error) { + result := make(map[string]any) + + // First pass: collect all append keys and their base keys + appendKeys := make(map[string][]any) + baseKeys := make(map[string]any) + + for key, value := range data { + if IsAppendKey(key) { + baseKey := GetBaseKey(key) + appendKeys[baseKey] = append(appendKeys[baseKey], value) + } else { + baseKeys[key] = value + } + } + + // Second pass: process all values recursively + for key, value := range baseKeys { + processedValue, err := ap.processValue(value) + if err != nil { + return nil, fmt.Errorf("failed to process value for key %s: %w", key, err) + } + result[key] = processedValue + } + + // Third pass: merge append keys with their base keys + for baseKey, appendValues := range appendKeys { + for _, appendValue := range appendValues { + processedValue, err := ap.processValue(appendValue) + if err != nil { + 
return nil, fmt.Errorf("failed to process append value for key %s: %w", baseKey, err) + } + if existingValue, exists := result[baseKey]; exists { + if isSlice(processedValue) && isSlice(existingValue) { + // Always append to the base key's slice + result[baseKey] = append(existingValue.([]any), processedValue.([]any)...) + } else { + // If not both slices, overwrite (fallback) + result[baseKey] = processedValue + } + } else { + result[baseKey] = processedValue + } + } + } + + return result, nil +} + +func (ap *AppendProcessor) processValue(value any) (any, error) { + switch v := value.(type) { + case map[string]any: + return ap.ProcessMap(v) + case map[any]any: + converted := make(map[string]any) + for k, val := range v { + if strKey, ok := k.(string); ok { + converted[strKey] = val + } else { + return nil, fmt.Errorf("non-string key in map: %v", k) + } + } + return ap.ProcessMap(converted) + case []any: + result := make([]any, len(v)) + for i, elem := range v { + processed, err := ap.processValue(elem) + if err != nil { + return nil, fmt.Errorf("failed to process slice element %d: %w", i, err) + } + result[i] = processed + } + return result, nil + default: + return value, nil + } +} + +func (ap *AppendProcessor) MergeWithAppend(dest, src map[string]any) error { + convertToStringMapInPlace(dest) + convertToStringMapInPlace(src) + + for key, srcValue := range src { + if IsAppendKey(key) { + baseKey := GetBaseKey(key) + destValue, exists := dest[baseKey] + if exists { + if isSlice(srcValue) && isSlice(destValue) { + destSlice := destValue.([]any) + srcSlice := srcValue.([]any) + dest[baseKey] = append(destSlice, srcSlice...) + } else { + dest[baseKey] = srcValue + } + } else { + dest[baseKey] = srcValue + } + delete(src, key) + } + } + + for key, srcValue := range src { + if isMap(srcValue) { + srcMap := srcValue.(map[string]any) + if destMap, ok := dest[key].(map[string]any); ok { + if err := ap.MergeWithAppend(destMap, srcMap); err != nil { + return err + } + dest[key] = destMap + } else { + dest[key] = srcMap + } + } else { + dest[key] = srcValue + } + } + return nil +} + +func convertToStringMapInPlace(v any) any { + switch t := v.(type) { + case map[string]any: + for k, v2 := range t { + t[k] = convertToStringMapInPlace(v2) + } + return t + case map[any]any: + m := make(map[string]any, len(t)) + for k, v2 := range t { + if ks, ok := k.(string); ok { + m[ks] = convertToStringMapInPlace(v2) + } + } + return m + case []any: + for i, v2 := range t { + t[i] = convertToStringMapInPlace(v2) + } + return t + default: + return v + } +} + +func isSlice(value any) bool { + _, ok := value.([]any) + return ok +} + +func isMap(value any) bool { + _, ok := value.(map[string]any) + return ok +} + +func IsAppendKey(key string) bool { + return strings.HasSuffix(key, "+") +} + +func GetBaseKey(key string) string { + return strings.TrimSuffix(key, "+") +} diff --git a/pkg/yaml/append_processor_test.go b/pkg/yaml/append_processor_test.go new file mode 100644 index 00000000..96a52821 --- /dev/null +++ b/pkg/yaml/append_processor_test.go @@ -0,0 +1,494 @@ +package yaml + +import ( + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestAppendProcessor_ProcessMap(t *testing.T) { + tests := []struct { + name string + input map[string]any + expected map[string]any + wantErr bool + }{ + { + name: "simple append to list", + input: map[string]any{ + "values+": []any{"new-value"}, + }, + expected: map[string]any{ + "values": []any{"new-value"}, + }, + }, + { + 
name: "nested append", + input: map[string]any{ + "config": map[string]any{ + "items+": []any{"item1", "item2"}, + }, + }, + expected: map[string]any{ + "config": map[string]any{ + "items": []any{"item1", "item2"}, + }, + }, + }, + { + name: "mixed regular and append keys", + input: map[string]any{ + "name": "test", + "values+": []any{"value1"}, + "config": map[string]any{ + "enabled": true, + "items+": []any{"item1"}, + }, + }, + expected: map[string]any{ + "name": "test", + "values": []any{"value1"}, + "config": map[string]any{ + "enabled": true, + "items": []any{"item1"}, + }, + }, + }, + { + name: "non-list append value", + input: map[string]any{ + "key+": "value", + }, + expected: map[string]any{ + "key": "value", + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + processor := NewAppendProcessor() + result, err := processor.ProcessMap(tt.input) + + if tt.wantErr { + assert.Error(t, err) + return + } + + require.NoError(t, err) + assert.Equal(t, tt.expected, result) + }) + } +} + +func TestAppendProcessor_MergeWithAppend(t *testing.T) { + tests := []struct { + name string + dest map[string]any + src map[string]any + expected map[string]any + wantErr bool + }{ + { + name: "append to existing list", + dest: map[string]any{ + "values": []any{"existing"}, + }, + src: map[string]any{ + "values+": []any{"new"}, + }, + expected: map[string]any{ + "values": []any{"existing", "new"}, + }, + }, + { + name: "append to non-existent list", + dest: map[string]any{ + "other": "value", + }, + src: map[string]any{ + "values+": []any{"new"}, + }, + expected: map[string]any{ + "other": "value", + "values": []any{"new"}, + }, + }, + { + name: "nested append", + dest: map[string]any{ + "config": map[string]any{ + "items": []any{"existing"}, + }, + }, + src: map[string]any{ + "config": map[string]any{ + "items+": []any{"new"}, + }, + }, + expected: map[string]any{ + "config": map[string]any{ + "items": []any{"existing", "new"}, + }, + }, + }, + { + name: "scalar with key+ treated as regular key (replace)", + dest: map[string]any{ + "replicas": 2, + }, + src: map[string]any{ + "replicas+": 1, + }, + expected: map[string]any{ + "replicas": 1, + }, + }, + { + name: "map with key+ treated as regular key (replace)", + dest: map[string]any{ + "resources": map[string]any{ + "limits": map[string]any{ + "memory": "256Mi", + "cpu": "200m", + }, + }, + }, + src: map[string]any{ + "resources+": map[string]any{ + "requests": map[string]any{ + "memory": "128Mi", + "cpu": "100m", + }, + }, + }, + expected: map[string]any{ + "resources": map[string]any{ + "requests": map[string]any{ + "memory": "128Mi", + "cpu": "100m", + }, + }, + }, + }, + { + name: "complex nested merge with key+ syntax for lists only", + dest: map[string]any{ + "replicas": 2, + "resources": map[string]any{ + "limits": map[string]any{ + "memory": "256Mi", + "cpu": "200m", + }, + "requests": map[string]any{ + "memory": "128Mi", + "cpu": "100m", + }, + }, + "service": map[string]any{ + "type": "ClusterIP", + "port": 80, + }, + "kube-state-metrics": map[string]any{ + "prometheus": map[string]any{ + "metricsRelabel": []any{ + map[string]any{"action": "drop"}, + }, + }, + }, + }, + src: map[string]any{ + "replicas+": 1, + "resources+": map[string]any{ + "limits": map[string]any{ + "memory": "512Mi", + "cpu": "500m", + }, + "requests": map[string]any{ + "memory": "256Mi", + "cpu": "250m", + }, + }, + "service+": map[string]any{ + "type": "LoadBalancer", + "port": 443, + "annotations": map[string]any{ + 
"service.beta.kubernetes.io/aws-load-balancer-type": "nlb", + }, + }, + "kube-state-metrics": map[string]any{ + "prometheus": map[string]any{ + "metricsRelabel+": []any{ + map[string]any{"action": "keep"}, + }, + }, + }, + }, + expected: map[string]any{ + "replicas": 1, + "resources": map[string]any{ + "limits": map[string]any{ + "memory": "512Mi", + "cpu": "500m", + }, + "requests": map[string]any{ + "memory": "256Mi", + "cpu": "250m", + }, + }, + "service": map[string]any{ + "type": "LoadBalancer", + "port": 443, + "annotations": map[string]any{ + "service.beta.kubernetes.io/aws-load-balancer-type": "nlb", + }, + }, + "kube-state-metrics": map[string]any{ + "prometheus": map[string]any{ + "metricsRelabel": []any{ + map[string]any{"action": "drop"}, + map[string]any{"action": "keep"}, + }, + }, + }, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + processor := NewAppendProcessor() + err := processor.MergeWithAppend(tt.dest, tt.src) + + if tt.wantErr { + assert.Error(t, err) + return + } + + require.NoError(t, err) + assert.Equal(t, tt.expected, tt.dest) + }) + } +} + +func TestUnmarshalWithAppend(t *testing.T) { + tests := []struct { + name string + yamlData string + expected map[string]any + wantErr bool + }{ + { + name: "simple append syntax", + yamlData: ` +values+: + - item1 + - item2 +name: test +`, + expected: map[string]any{ + "values": []any{"item1", "item2"}, + "name": "test", + }, + }, + { + name: "nested append syntax", + yamlData: ` +config: + items+: + - existing + - new + enabled: true +`, + expected: map[string]any{ + "config": map[string]any{ + "items": []any{"existing", "new"}, + "enabled": true, + }, + }, + }, + { + name: "complex values file with key+ syntax", + yamlData: ` +replicas+: 1 +resources+: + limits: + memory: 512Mi + cpu: 500m + requests: + memory: 256Mi + cpu: 250m +service+: + type: LoadBalancer + port: 443 + annotations: + service.beta.kubernetes.io/aws-load-balancer-type: nlb +kube-state-metrics: + prometheus: + metricsRelabel+: + - action: keep +`, + expected: map[string]any{ + "replicas": 1, + "resources": map[string]any{ + "limits": map[string]any{ + "memory": "512Mi", + "cpu": "500m", + }, + "requests": map[string]any{ + "memory": "256Mi", + "cpu": "250m", + }, + }, + "service": map[string]any{ + "type": "LoadBalancer", + "port": 443, + "annotations": map[string]any{ + "service.beta.kubernetes.io/aws-load-balancer-type": "nlb", + }, + }, + "kube-state-metrics": map[string]any{ + "prometheus": map[string]any{ + "metricsRelabel": []any{ + map[string]any{"action": "keep"}, + }, + }, + }, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + var result map[string]any + err := UnmarshalWithAppend([]byte(tt.yamlData), &result) + + if tt.wantErr { + assert.Error(t, err) + return + } + + require.NoError(t, err) + assert.Equal(t, tt.expected, result) + }) + } +} + +func TestIsAppendKey(t *testing.T) { + tests := []struct { + key string + expected bool + }{ + {"key+", true}, + {"key", false}, + {"key++", true}, + {"+key", false}, + {"", false}, + } + + for _, tt := range tests { + t.Run(tt.key, func(t *testing.T) { + result := IsAppendKey(tt.key) + assert.Equal(t, tt.expected, result) + }) + } +} + +func TestGetBaseKey(t *testing.T) { + tests := []struct { + key string + expected string + }{ + {"key+", "key"}, + {"key", "key"}, + {"key++", "key+"}, + {"+key", "+key"}, + {"", ""}, + } + + for _, tt := range tests { + t.Run(tt.key, func(t *testing.T) { + result := GetBaseKey(tt.key) + 
assert.Equal(t, tt.expected, result) + }) + } +} + +func TestAppendProcessor_ErrorCases(t *testing.T) { + tests := []struct { + name string + input map[string]any + wantErr bool + }{ + { + name: "invalid map with non-string key", + input: map[string]any{ + "valid": map[any]any{ + 123: "invalid", // non-string key + }, + }, + wantErr: true, + }, + { + name: "valid map with string keys", + input: map[string]any{ + "valid": map[string]any{ + "key": "value", + }, + }, + wantErr: false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + processor := NewAppendProcessor() + _, err := processor.ProcessMap(tt.input) + + if tt.wantErr { + assert.Error(t, err) + } else { + assert.NoError(t, err) + } + }) + } +} + +func TestUnmarshalWithAppend_ErrorCases(t *testing.T) { + tests := []struct { + name string + yamlData string + wantErr bool + }{ + { + name: "invalid YAML", + yamlData: ` +invalid: yaml: content + - missing: proper: structure +`, + wantErr: true, + }, + { + name: "valid YAML with key+", + yamlData: ` +valid: true +key+: value +`, + wantErr: false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + var result map[string]any + err := UnmarshalWithAppend([]byte(tt.yamlData), &result) + + if tt.wantErr { + assert.Error(t, err) + } else { + assert.NoError(t, err) + } + }) + } +} diff --git a/pkg/yaml/yaml.go b/pkg/yaml/yaml.go index ffde3622..e909c1a6 100644 --- a/pkg/yaml/yaml.go +++ b/pkg/yaml/yaml.go @@ -63,3 +63,76 @@ func Unmarshal(data []byte, v any) error { return v2.Unmarshal(data, v) } + +// UnmarshalWithAppend unmarshals YAML data with support for key+ syntax +// This function first unmarshals the YAML normally, then processes any key+ syntax +func UnmarshalWithAppend(data []byte, v any) error { + var rawData map[string]any + if err := Unmarshal(data, &rawData); err != nil { + return err + } + + processor := NewAppendProcessor() + processedData, err := processor.ProcessMap(rawData) + if err != nil { + return err + } + + processedYAML, err := Marshal(processedData) + if err != nil { + return err + } + + return Unmarshal(processedYAML, v) +} + +// NewDecoderWithAppend creates and returns a function that is used to decode a YAML document +// with support for key+ syntax for appending values to lists +func NewDecoderWithAppend(data []byte, strict bool) func(any) error { + if runtime.GoYamlV3 { + decoder := v3.NewDecoder(bytes.NewReader(data)) + decoder.KnownFields(strict) + return func(v any) error { + var rawData map[string]any + if err := decoder.Decode(&rawData); err != nil { + return err + } + + processor := NewAppendProcessor() + processedData, err := processor.ProcessMap(rawData) + if err != nil { + return err + } + + processedYAML, err := Marshal(processedData) + if err != nil { + return err + } + + return v3.Unmarshal(processedYAML, v) + } + } + + decoder := v2.NewDecoder(bytes.NewReader(data)) + decoder.SetStrict(strict) + + return func(v any) error { + var rawData map[string]any + if err := decoder.Decode(&rawData); err != nil { + return err + } + + processor := NewAppendProcessor() + processedData, err := processor.ProcessMap(rawData) + if err != nil { + return err + } + + processedYAML, err := Marshal(processedData) + if err != nil { + return err + } + + return v2.Unmarshal(processedYAML, v) + } +}
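
Usage sketch for reviewers: a minimal example of how the append API introduced by this patch is meant to be called. The import path github.com/helmfile/helmfile/pkg/yaml and the example values are assumptions for illustration only; the expected behavior mirrors the "append to existing list" case in append_processor_test.go.

    package main

    import (
        "fmt"

        // Assumed import path for the pkg/yaml package this patch extends.
        "github.com/helmfile/helmfile/pkg/yaml"
    )

    func main() {
        // Values already merged from an earlier layer.
        dest := map[string]any{
            "tolerations": []any{
                map[string]any{"key": "base"},
            },
        }

        // A later layer uses the key+ syntax to append to the list
        // instead of replacing it.
        src := map[string]any{
            "tolerations+": []any{
                map[string]any{"key": "extra"},
            },
        }

        p := yaml.NewAppendProcessor()
        if err := p.MergeWithAppend(dest, src); err != nil {
            panic(err)
        }

        // dest["tolerations"] now holds both entries: "base" followed by "extra".
        fmt.Println(dest["tolerations"])
    }

The same semantics apply when the values arrive as raw YAML: yaml.UnmarshalWithAppend resolves key+ entries into their base keys before handing the result to the normal merge path, which is why WriteReleasesValues and LoadYAMLForEmbedding switch to it in this patch.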