Add support for key+ syntax
Signed-off-by: Nemanja Zeljkovic <nocturo@gmail.com>
This commit is contained in:
parent
a76bec234c
commit
56c01ee510
|
|
@ -85,7 +85,8 @@ func (e *Environment) GetMergedValues() (map[string]any, error) {
|
|||
return nil, err
|
||||
}
|
||||
|
||||
if err := mergo.Merge(&vals, e.Values, mergo.WithOverride); err != nil {
|
||||
processor := yaml.NewAppendProcessor()
|
||||
if err := processor.MergeWithAppend(vals, e.Values); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -19,7 +19,6 @@ import (
|
|||
"text/template"
|
||||
"time"
|
||||
|
||||
"dario.cat/mergo"
|
||||
"github.com/Masterminds/semver/v3"
|
||||
"github.com/helmfile/chartify"
|
||||
"github.com/helmfile/vals"
|
||||
|
|
@ -1699,11 +1698,12 @@ func (st *HelmState) WriteReleasesValues(helm helmexec.Interface, additionalValu
|
|||
return []error{fmt.Errorf("reading %s: %w", f, err)}
|
||||
}
|
||||
|
||||
if err := yaml.Unmarshal(srcBytes, &src); err != nil {
|
||||
if err := yaml.UnmarshalWithAppend(srcBytes, &src); err != nil {
|
||||
return []error{fmt.Errorf("unmarshalling yaml %s: %w", f, err)}
|
||||
}
|
||||
|
||||
if err := mergo.Merge(&merged, &src, mergo.WithOverride); err != nil {
|
||||
processor := yaml.NewAppendProcessor()
|
||||
if err := processor.MergeWithAppend(merged, src); err != nil {
|
||||
return []error{fmt.Errorf("merging %s: %w", f, err)}
|
||||
}
|
||||
}
|
||||
|
|
@ -3298,49 +3298,68 @@ func (st *HelmState) getReleaseMissingFileHandler(release *ReleaseSpec) *string
|
|||
}
|
||||
}
|
||||
|
||||
func (st *HelmState) generateTemporaryReleaseValuesFiles(release *ReleaseSpec, values []any) ([]string, error) {
|
||||
generatedFiles := []string{}
|
||||
func (st *HelmState) generateTemporaryReleaseValuesFiles(release *ReleaseSpec, values []any, missingFileHandler *string) ([]string, error) {
|
||||
var generatedFiles []string
|
||||
var mergedRaw = make(map[string]any)
|
||||
|
||||
for _, value := range values {
|
||||
var fileValues map[string]any
|
||||
|
||||
switch typedValue := value.(type) {
|
||||
case string:
|
||||
paths, skip, err := st.storage().resolveFile(st.getReleaseMissingFileHandler(release), "values", typedValue, st.getReleaseMissingFileHandlerConfig(release).resolveFileOptions()...)
|
||||
if err != nil {
|
||||
return generatedFiles, err
|
||||
return nil, err
|
||||
}
|
||||
if skip {
|
||||
continue
|
||||
}
|
||||
|
||||
if len(paths) > 1 {
|
||||
return generatedFiles, fmt.Errorf("glob patterns in release values and secrets is not supported yet. please submit a feature request if necessary")
|
||||
return nil, fmt.Errorf("glob patterns in release values and secrets is not supported yet. please submit a feature request if necessary")
|
||||
}
|
||||
path := paths[0]
|
||||
|
||||
yamlBytes, err := st.RenderReleaseValuesFileToBytes(release, path)
|
||||
if err != nil {
|
||||
return generatedFiles, fmt.Errorf("failed to render values files \"%s\": %v", typedValue, err)
|
||||
return nil, fmt.Errorf("failed to render values files \"%s\": %v", typedValue, err)
|
||||
}
|
||||
|
||||
valfile, err := createTempValuesFile(release, yamlBytes)
|
||||
if err != nil {
|
||||
return generatedFiles, err
|
||||
}
|
||||
defer func() {
|
||||
_ = valfile.Close()
|
||||
}()
|
||||
|
||||
if _, err := valfile.Write(yamlBytes); err != nil {
|
||||
return generatedFiles, fmt.Errorf("failed to write %s: %v", valfile.Name(), err)
|
||||
if err := yaml.Unmarshal(yamlBytes, &fileValues); err != nil {
|
||||
return nil, fmt.Errorf("failed to unmarshal values file \"%s\": %v", typedValue, err)
|
||||
}
|
||||
|
||||
st.logger.Debugf("Successfully generated the value file at %s. produced:\n%s", path, string(yamlBytes))
|
||||
|
||||
generatedFiles = append(generatedFiles, valfile.Name())
|
||||
case map[any]any, map[string]any:
|
||||
valfile, err := createTempValuesFile(release, typedValue)
|
||||
if m, ok := typedValue.(map[string]any); ok {
|
||||
fileValues = m
|
||||
} else {
|
||||
fileValues = make(map[string]any)
|
||||
for k, v := range typedValue.(map[any]any) {
|
||||
if strKey, ok := k.(string); ok {
|
||||
fileValues[strKey] = v
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
default:
|
||||
return nil, fmt.Errorf("unexpected type of value: value=%v, type=%T", typedValue, typedValue)
|
||||
}
|
||||
|
||||
for k, v := range fileValues {
|
||||
mergedRaw[k] = mergeAppendValues(mergedRaw[k], v, k)
|
||||
}
|
||||
}
|
||||
|
||||
processor := yaml.NewAppendProcessor()
|
||||
processed, err := processor.ProcessMap(mergedRaw)
|
||||
if err != nil {
|
||||
return generatedFiles, err
|
||||
return nil, fmt.Errorf("failed to process key+ syntax: %w", err)
|
||||
}
|
||||
|
||||
if len(processed) > 0 {
|
||||
valfile, err := createTempValuesFile(release, processed)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer func() {
|
||||
_ = valfile.Close()
|
||||
|
|
@ -3351,41 +3370,65 @@ func (st *HelmState) generateTemporaryReleaseValuesFiles(release *ReleaseSpec, v
|
|||
_ = encoder.Close()
|
||||
}()
|
||||
|
||||
if err := encoder.Encode(typedValue); err != nil {
|
||||
return generatedFiles, err
|
||||
if err := encoder.Encode(processed); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
generatedFiles = append(generatedFiles, valfile.Name())
|
||||
default:
|
||||
return generatedFiles, fmt.Errorf("unexpected type of value: value=%v, type=%T", typedValue, typedValue)
|
||||
}
|
||||
}
|
||||
|
||||
return generatedFiles, nil
|
||||
}
|
||||
|
||||
// mergeAppendValues combines two values observed for the same key while
// leaving key+ entries intact so they can be expanded later. A nil existing
// value yields incoming; two string-keyed maps are merged recursively with
// incoming winning per key; any other combination is replaced by incoming.
// The key argument is only threaded through the recursion and is never read.
func mergeAppendValues(existing, incoming any, key string) any {
	if existing == nil {
		return incoming
	}
	dst, dstIsMap := existing.(map[string]any)
	src, srcIsMap := incoming.(map[string]any)
	if !dstIsMap || !srcIsMap {
		// Non-map (or mismatched) values: the newer value wins outright.
		return incoming
	}
	for childKey, childVal := range src {
		dst[childKey] = mergeAppendValues(dst[childKey], childVal, childKey)
	}
	return dst
}
|
||||
|
||||
func (st *HelmState) generateVanillaValuesFiles(release *ReleaseSpec) ([]string, error) {
|
||||
values := []any{}
|
||||
inlineValues := []any{}
|
||||
|
||||
for _, v := range release.Values {
|
||||
switch typedValue := v.(type) {
|
||||
case string:
|
||||
path := st.storage().normalizePath(release.ValuesPathPrefix + typedValue)
|
||||
values = append(values, path)
|
||||
case map[any]any, map[string]any:
|
||||
inlineValues = append(inlineValues, v)
|
||||
default:
|
||||
values = append(values, v)
|
||||
}
|
||||
}
|
||||
|
||||
var valuesSecretsRendered []any
|
||||
if len(values) > 0 {
|
||||
valuesMapSecretsRendered, err := st.valsRuntime.Eval(map[string]any{"values": values})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
valuesSecretsRendered, ok := valuesMapSecretsRendered["values"].([]any)
|
||||
rendered, ok := valuesMapSecretsRendered["values"].([]any)
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("Failed to render values in %s for release %s: type %T isn't supported", st.FilePath, release.Name, valuesMapSecretsRendered["values"])
|
||||
}
|
||||
valuesSecretsRendered = rendered
|
||||
}
|
||||
|
||||
generatedFiles, err := st.generateTemporaryReleaseValuesFiles(release, valuesSecretsRendered)
|
||||
allValues := append(valuesSecretsRendered, inlineValues...)
|
||||
|
||||
generatedFiles, err := st.generateTemporaryReleaseValuesFiles(release, allValues, release.MissingFileHandler)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
|
@ -3451,7 +3494,7 @@ func (st *HelmState) generateSecretValuesFiles(helm helmexec.Interface, release
|
|||
generatedDecryptedFiles = append(generatedDecryptedFiles, valfile)
|
||||
}
|
||||
|
||||
generatedFiles, err := st.generateTemporaryReleaseValuesFiles(release, generatedDecryptedFiles)
|
||||
generatedFiles, err := st.generateTemporaryReleaseValuesFiles(release, generatedDecryptedFiles, release.MissingFileHandler)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
|
@ -3965,7 +4008,7 @@ func (st *HelmState) LoadYAMLForEmbedding(release *ReleaseSpec, entries []any, m
|
|||
return nil, fmt.Errorf("failed to render values files \"%s\": %v", t, err)
|
||||
}
|
||||
|
||||
if err := yaml.Unmarshal(yamlBytes, &values); err != nil {
|
||||
if err := yaml.UnmarshalWithAppend(yamlBytes, &values); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -1942,7 +1942,7 @@ func TestHelmState_SyncReleasesCleanup(t *testing.T) {
|
|||
},
|
||||
},
|
||||
helm: &exectest.Helm{},
|
||||
expectedNumRemovedFiles: 2,
|
||||
expectedNumRemovedFiles: 1,
|
||||
},
|
||||
}
|
||||
for i := range tests {
|
||||
|
|
@ -2029,7 +2029,7 @@ func TestHelmState_DiffReleasesCleanup(t *testing.T) {
|
|||
},
|
||||
},
|
||||
helm: &exectest.Helm{},
|
||||
expectedNumRemovedFiles: 2,
|
||||
expectedNumRemovedFiles: 1,
|
||||
},
|
||||
}
|
||||
for i := range tests {
|
||||
|
|
|
|||
|
|
@ -0,0 +1,174 @@
|
|||
package yaml
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// AppendProcessor expands the "key+" append syntax in generic YAML maps.
// It is stateless, so a single value may be reused across calls.
type AppendProcessor struct{}

// NewAppendProcessor returns a ready-to-use AppendProcessor.
func NewAppendProcessor() *AppendProcessor {
	return &AppendProcessor{}
}
|
||||
|
||||
func (ap *AppendProcessor) ProcessMap(data map[string]any) (map[string]any, error) {
|
||||
result := make(map[string]any)
|
||||
|
||||
// First pass: collect all append keys and their base keys
|
||||
appendKeys := make(map[string][]any)
|
||||
baseKeys := make(map[string]any)
|
||||
|
||||
for key, value := range data {
|
||||
if IsAppendKey(key) {
|
||||
baseKey := GetBaseKey(key)
|
||||
appendKeys[baseKey] = append(appendKeys[baseKey], value)
|
||||
} else {
|
||||
baseKeys[key] = value
|
||||
}
|
||||
}
|
||||
|
||||
// Second pass: process all values recursively
|
||||
for key, value := range baseKeys {
|
||||
processedValue, err := ap.processValue(value)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to process value for key %s: %w", key, err)
|
||||
}
|
||||
result[key] = processedValue
|
||||
}
|
||||
|
||||
// Third pass: merge append keys with their base keys
|
||||
for baseKey, appendValues := range appendKeys {
|
||||
for _, appendValue := range appendValues {
|
||||
processedValue, err := ap.processValue(appendValue)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to process append value for key %s: %w", baseKey, err)
|
||||
}
|
||||
if existingValue, exists := result[baseKey]; exists {
|
||||
if isSlice(processedValue) && isSlice(existingValue) {
|
||||
// Always append to the base key's slice
|
||||
result[baseKey] = append(existingValue.([]any), processedValue.([]any)...)
|
||||
} else {
|
||||
// If not both slices, overwrite (fallback)
|
||||
result[baseKey] = processedValue
|
||||
}
|
||||
} else {
|
||||
result[baseKey] = processedValue
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return result, nil
|
||||
}
|
||||
|
||||
func (ap *AppendProcessor) processValue(value any) (any, error) {
|
||||
switch v := value.(type) {
|
||||
case map[string]any:
|
||||
return ap.ProcessMap(v)
|
||||
case map[any]any:
|
||||
converted := make(map[string]any)
|
||||
for k, val := range v {
|
||||
if strKey, ok := k.(string); ok {
|
||||
converted[strKey] = val
|
||||
} else {
|
||||
return nil, fmt.Errorf("non-string key in map: %v", k)
|
||||
}
|
||||
}
|
||||
return ap.ProcessMap(converted)
|
||||
case []any:
|
||||
result := make([]any, len(v))
|
||||
for i, elem := range v {
|
||||
processed, err := ap.processValue(elem)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to process slice element %d: %w", i, err)
|
||||
}
|
||||
result[i] = processed
|
||||
}
|
||||
return result, nil
|
||||
default:
|
||||
return value, nil
|
||||
}
|
||||
}
|
||||
|
||||
func (ap *AppendProcessor) MergeWithAppend(dest, src map[string]any) error {
|
||||
convertToStringMapInPlace(dest)
|
||||
convertToStringMapInPlace(src)
|
||||
|
||||
for key, srcValue := range src {
|
||||
if IsAppendKey(key) {
|
||||
baseKey := GetBaseKey(key)
|
||||
destValue, exists := dest[baseKey]
|
||||
if exists {
|
||||
if isSlice(srcValue) && isSlice(destValue) {
|
||||
destSlice := destValue.([]any)
|
||||
srcSlice := srcValue.([]any)
|
||||
dest[baseKey] = append(destSlice, srcSlice...)
|
||||
} else {
|
||||
dest[baseKey] = srcValue
|
||||
}
|
||||
} else {
|
||||
dest[baseKey] = srcValue
|
||||
}
|
||||
delete(src, key)
|
||||
}
|
||||
}
|
||||
|
||||
for key, srcValue := range src {
|
||||
if isMap(srcValue) {
|
||||
srcMap := srcValue.(map[string]any)
|
||||
if destMap, ok := dest[key].(map[string]any); ok {
|
||||
if err := ap.MergeWithAppend(destMap, srcMap); err != nil {
|
||||
return err
|
||||
}
|
||||
dest[key] = destMap
|
||||
} else {
|
||||
dest[key] = srcMap
|
||||
}
|
||||
} else {
|
||||
dest[key] = srcValue
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// convertToStringMapInPlace normalizes v so that every nested map uses
// string keys: map[any]any values are rebuilt as map[string]any (entries
// with non-string keys are silently dropped), while map[string]any and
// []any containers are rewritten in place. Scalars pass through untouched.
func convertToStringMapInPlace(v any) any {
	switch typed := v.(type) {
	case map[string]any:
		for key, inner := range typed {
			typed[key] = convertToStringMapInPlace(inner)
		}
		return typed
	case map[any]any:
		out := make(map[string]any, len(typed))
		for key, inner := range typed {
			s, ok := key.(string)
			if !ok {
				continue // non-string keys are silently dropped
			}
			out[s] = convertToStringMapInPlace(inner)
		}
		return out
	case []any:
		for i, inner := range typed {
			typed[i] = convertToStringMapInPlace(inner)
		}
		return typed
	default:
		return v
	}
}
|
||||
|
||||
// isSlice reports whether value is a []any slice (typed slices do not count).
func isSlice(value any) bool {
	switch value.(type) {
	case []any:
		return true
	default:
		return false
	}
}
|
||||
|
||||
// isMap reports whether value is a map[string]any (map[any]any does not count).
func isMap(value any) bool {
	switch value.(type) {
	case map[string]any:
		return true
	default:
		return false
	}
}
|
||||
|
||||
// IsAppendKey reports whether key uses the append syntax, i.e. ends with "+".
func IsAppendKey(key string) bool {
	return len(key) > 0 && key[len(key)-1] == '+'
}
|
||||
|
||||
// GetBaseKey returns key with one trailing "+" removed, if present;
// otherwise key is returned unchanged.
func GetBaseKey(key string) string {
	if strings.HasSuffix(key, "+") {
		return key[:len(key)-1]
	}
	return key
}
|
||||
|
|
@ -0,0 +1,494 @@
|
|||
package yaml
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func TestAppendProcessor_ProcessMap(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
input map[string]any
|
||||
expected map[string]any
|
||||
wantErr bool
|
||||
}{
|
||||
{
|
||||
name: "simple append to list",
|
||||
input: map[string]any{
|
||||
"values+": []any{"new-value"},
|
||||
},
|
||||
expected: map[string]any{
|
||||
"values": []any{"new-value"},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "nested append",
|
||||
input: map[string]any{
|
||||
"config": map[string]any{
|
||||
"items+": []any{"item1", "item2"},
|
||||
},
|
||||
},
|
||||
expected: map[string]any{
|
||||
"config": map[string]any{
|
||||
"items": []any{"item1", "item2"},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "mixed regular and append keys",
|
||||
input: map[string]any{
|
||||
"name": "test",
|
||||
"values+": []any{"value1"},
|
||||
"config": map[string]any{
|
||||
"enabled": true,
|
||||
"items+": []any{"item1"},
|
||||
},
|
||||
},
|
||||
expected: map[string]any{
|
||||
"name": "test",
|
||||
"values": []any{"value1"},
|
||||
"config": map[string]any{
|
||||
"enabled": true,
|
||||
"items": []any{"item1"},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "non-list append value",
|
||||
input: map[string]any{
|
||||
"key+": "value",
|
||||
},
|
||||
expected: map[string]any{
|
||||
"key": "value",
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
processor := NewAppendProcessor()
|
||||
result, err := processor.ProcessMap(tt.input)
|
||||
|
||||
if tt.wantErr {
|
||||
assert.Error(t, err)
|
||||
return
|
||||
}
|
||||
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, tt.expected, result)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestAppendProcessor_MergeWithAppend(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
dest map[string]any
|
||||
src map[string]any
|
||||
expected map[string]any
|
||||
wantErr bool
|
||||
}{
|
||||
{
|
||||
name: "append to existing list",
|
||||
dest: map[string]any{
|
||||
"values": []any{"existing"},
|
||||
},
|
||||
src: map[string]any{
|
||||
"values+": []any{"new"},
|
||||
},
|
||||
expected: map[string]any{
|
||||
"values": []any{"existing", "new"},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "append to non-existent list",
|
||||
dest: map[string]any{
|
||||
"other": "value",
|
||||
},
|
||||
src: map[string]any{
|
||||
"values+": []any{"new"},
|
||||
},
|
||||
expected: map[string]any{
|
||||
"other": "value",
|
||||
"values": []any{"new"},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "nested append",
|
||||
dest: map[string]any{
|
||||
"config": map[string]any{
|
||||
"items": []any{"existing"},
|
||||
},
|
||||
},
|
||||
src: map[string]any{
|
||||
"config": map[string]any{
|
||||
"items+": []any{"new"},
|
||||
},
|
||||
},
|
||||
expected: map[string]any{
|
||||
"config": map[string]any{
|
||||
"items": []any{"existing", "new"},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "scalar with key+ treated as regular key (replace)",
|
||||
dest: map[string]any{
|
||||
"replicas": 2,
|
||||
},
|
||||
src: map[string]any{
|
||||
"replicas+": 1,
|
||||
},
|
||||
expected: map[string]any{
|
||||
"replicas": 1,
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "map with key+ treated as regular key (replace)",
|
||||
dest: map[string]any{
|
||||
"resources": map[string]any{
|
||||
"limits": map[string]any{
|
||||
"memory": "256Mi",
|
||||
"cpu": "200m",
|
||||
},
|
||||
},
|
||||
},
|
||||
src: map[string]any{
|
||||
"resources+": map[string]any{
|
||||
"requests": map[string]any{
|
||||
"memory": "128Mi",
|
||||
"cpu": "100m",
|
||||
},
|
||||
},
|
||||
},
|
||||
expected: map[string]any{
|
||||
"resources": map[string]any{
|
||||
"requests": map[string]any{
|
||||
"memory": "128Mi",
|
||||
"cpu": "100m",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "complex nested merge with key+ syntax for lists only",
|
||||
dest: map[string]any{
|
||||
"replicas": 2,
|
||||
"resources": map[string]any{
|
||||
"limits": map[string]any{
|
||||
"memory": "256Mi",
|
||||
"cpu": "200m",
|
||||
},
|
||||
"requests": map[string]any{
|
||||
"memory": "128Mi",
|
||||
"cpu": "100m",
|
||||
},
|
||||
},
|
||||
"service": map[string]any{
|
||||
"type": "ClusterIP",
|
||||
"port": 80,
|
||||
},
|
||||
"kube-state-metrics": map[string]any{
|
||||
"prometheus": map[string]any{
|
||||
"metricsRelabel": []any{
|
||||
map[string]any{"action": "drop"},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
src: map[string]any{
|
||||
"replicas+": 1,
|
||||
"resources+": map[string]any{
|
||||
"limits": map[string]any{
|
||||
"memory": "512Mi",
|
||||
"cpu": "500m",
|
||||
},
|
||||
"requests": map[string]any{
|
||||
"memory": "256Mi",
|
||||
"cpu": "250m",
|
||||
},
|
||||
},
|
||||
"service+": map[string]any{
|
||||
"type": "LoadBalancer",
|
||||
"port": 443,
|
||||
"annotations": map[string]any{
|
||||
"service.beta.kubernetes.io/aws-load-balancer-type": "nlb",
|
||||
},
|
||||
},
|
||||
"kube-state-metrics": map[string]any{
|
||||
"prometheus": map[string]any{
|
||||
"metricsRelabel+": []any{
|
||||
map[string]any{"action": "keep"},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
expected: map[string]any{
|
||||
"replicas": 1,
|
||||
"resources": map[string]any{
|
||||
"limits": map[string]any{
|
||||
"memory": "512Mi",
|
||||
"cpu": "500m",
|
||||
},
|
||||
"requests": map[string]any{
|
||||
"memory": "256Mi",
|
||||
"cpu": "250m",
|
||||
},
|
||||
},
|
||||
"service": map[string]any{
|
||||
"type": "LoadBalancer",
|
||||
"port": 443,
|
||||
"annotations": map[string]any{
|
||||
"service.beta.kubernetes.io/aws-load-balancer-type": "nlb",
|
||||
},
|
||||
},
|
||||
"kube-state-metrics": map[string]any{
|
||||
"prometheus": map[string]any{
|
||||
"metricsRelabel": []any{
|
||||
map[string]any{"action": "drop"},
|
||||
map[string]any{"action": "keep"},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
processor := NewAppendProcessor()
|
||||
err := processor.MergeWithAppend(tt.dest, tt.src)
|
||||
|
||||
if tt.wantErr {
|
||||
assert.Error(t, err)
|
||||
return
|
||||
}
|
||||
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, tt.expected, tt.dest)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestUnmarshalWithAppend(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
yamlData string
|
||||
expected map[string]any
|
||||
wantErr bool
|
||||
}{
|
||||
{
|
||||
name: "simple append syntax",
|
||||
yamlData: `
|
||||
values+:
|
||||
- item1
|
||||
- item2
|
||||
name: test
|
||||
`,
|
||||
expected: map[string]any{
|
||||
"values": []any{"item1", "item2"},
|
||||
"name": "test",
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "nested append syntax",
|
||||
yamlData: `
|
||||
config:
|
||||
items+:
|
||||
- existing
|
||||
- new
|
||||
enabled: true
|
||||
`,
|
||||
expected: map[string]any{
|
||||
"config": map[string]any{
|
||||
"items": []any{"existing", "new"},
|
||||
"enabled": true,
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "complex values file with key+ syntax",
|
||||
yamlData: `
|
||||
replicas+: 1
|
||||
resources+:
|
||||
limits:
|
||||
memory: 512Mi
|
||||
cpu: 500m
|
||||
requests:
|
||||
memory: 256Mi
|
||||
cpu: 250m
|
||||
service+:
|
||||
type: LoadBalancer
|
||||
port: 443
|
||||
annotations:
|
||||
service.beta.kubernetes.io/aws-load-balancer-type: nlb
|
||||
kube-state-metrics:
|
||||
prometheus:
|
||||
metricsRelabel+:
|
||||
- action: keep
|
||||
`,
|
||||
expected: map[string]any{
|
||||
"replicas": 1,
|
||||
"resources": map[string]any{
|
||||
"limits": map[string]any{
|
||||
"memory": "512Mi",
|
||||
"cpu": "500m",
|
||||
},
|
||||
"requests": map[string]any{
|
||||
"memory": "256Mi",
|
||||
"cpu": "250m",
|
||||
},
|
||||
},
|
||||
"service": map[string]any{
|
||||
"type": "LoadBalancer",
|
||||
"port": 443,
|
||||
"annotations": map[string]any{
|
||||
"service.beta.kubernetes.io/aws-load-balancer-type": "nlb",
|
||||
},
|
||||
},
|
||||
"kube-state-metrics": map[string]any{
|
||||
"prometheus": map[string]any{
|
||||
"metricsRelabel": []any{
|
||||
map[string]any{"action": "keep"},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
var result map[string]any
|
||||
err := UnmarshalWithAppend([]byte(tt.yamlData), &result)
|
||||
|
||||
if tt.wantErr {
|
||||
assert.Error(t, err)
|
||||
return
|
||||
}
|
||||
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, tt.expected, result)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestIsAppendKey(t *testing.T) {
|
||||
tests := []struct {
|
||||
key string
|
||||
expected bool
|
||||
}{
|
||||
{"key+", true},
|
||||
{"key", false},
|
||||
{"key++", true},
|
||||
{"+key", false},
|
||||
{"", false},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.key, func(t *testing.T) {
|
||||
result := IsAppendKey(tt.key)
|
||||
assert.Equal(t, tt.expected, result)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestGetBaseKey(t *testing.T) {
|
||||
tests := []struct {
|
||||
key string
|
||||
expected string
|
||||
}{
|
||||
{"key+", "key"},
|
||||
{"key", "key"},
|
||||
{"key++", "key+"},
|
||||
{"+key", "+key"},
|
||||
{"", ""},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.key, func(t *testing.T) {
|
||||
result := GetBaseKey(tt.key)
|
||||
assert.Equal(t, tt.expected, result)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestAppendProcessor_ErrorCases(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
input map[string]any
|
||||
wantErr bool
|
||||
}{
|
||||
{
|
||||
name: "invalid map with non-string key",
|
||||
input: map[string]any{
|
||||
"valid": map[any]any{
|
||||
123: "invalid", // non-string key
|
||||
},
|
||||
},
|
||||
wantErr: true,
|
||||
},
|
||||
{
|
||||
name: "valid map with string keys",
|
||||
input: map[string]any{
|
||||
"valid": map[string]any{
|
||||
"key": "value",
|
||||
},
|
||||
},
|
||||
wantErr: false,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
processor := NewAppendProcessor()
|
||||
_, err := processor.ProcessMap(tt.input)
|
||||
|
||||
if tt.wantErr {
|
||||
assert.Error(t, err)
|
||||
} else {
|
||||
assert.NoError(t, err)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestUnmarshalWithAppend_ErrorCases(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
yamlData string
|
||||
wantErr bool
|
||||
}{
|
||||
{
|
||||
name: "invalid YAML",
|
||||
yamlData: `
|
||||
invalid: yaml: content
|
||||
- missing: proper: structure
|
||||
`,
|
||||
wantErr: true,
|
||||
},
|
||||
{
|
||||
name: "valid YAML with key+",
|
||||
yamlData: `
|
||||
valid: true
|
||||
key+: value
|
||||
`,
|
||||
wantErr: false,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
var result map[string]any
|
||||
err := UnmarshalWithAppend([]byte(tt.yamlData), &result)
|
||||
|
||||
if tt.wantErr {
|
||||
assert.Error(t, err)
|
||||
} else {
|
||||
assert.NoError(t, err)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
|
@ -63,3 +63,76 @@ func Unmarshal(data []byte, v any) error {
|
|||
|
||||
return v2.Unmarshal(data, v)
|
||||
}
|
||||
|
||||
// UnmarshalWithAppend unmarshals YAML data with support for key+ syntax
|
||||
// This function first unmarshals the YAML normally, then processes any key+ syntax
|
||||
func UnmarshalWithAppend(data []byte, v any) error {
|
||||
var rawData map[string]any
|
||||
if err := Unmarshal(data, &rawData); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
processor := NewAppendProcessor()
|
||||
processedData, err := processor.ProcessMap(rawData)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
processedYAML, err := Marshal(processedData)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return Unmarshal(processedYAML, v)
|
||||
}
|
||||
|
||||
// NewDecoderWithAppend creates and returns a function that is used to decode a YAML document
|
||||
// with support for key+ syntax for appending values to lists
|
||||
func NewDecoderWithAppend(data []byte, strict bool) func(any) error {
|
||||
if runtime.GoYamlV3 {
|
||||
decoder := v3.NewDecoder(bytes.NewReader(data))
|
||||
decoder.KnownFields(strict)
|
||||
return func(v any) error {
|
||||
var rawData map[string]any
|
||||
if err := decoder.Decode(&rawData); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
processor := NewAppendProcessor()
|
||||
processedData, err := processor.ProcessMap(rawData)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
processedYAML, err := Marshal(processedData)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return v3.Unmarshal(processedYAML, v)
|
||||
}
|
||||
}
|
||||
|
||||
decoder := v2.NewDecoder(bytes.NewReader(data))
|
||||
decoder.SetStrict(strict)
|
||||
|
||||
return func(v any) error {
|
||||
var rawData map[string]any
|
||||
if err := decoder.Decode(&rawData); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
processor := NewAppendProcessor()
|
||||
processedData, err := processor.ProcessMap(rawData)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
processedYAML, err := Marshal(processedData)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return v2.Unmarshal(processedYAML, v)
|
||||
}
|
||||
}
|
||||
|
|
|
|||
Loading…
Reference in New Issue