Remove unneeded code and clean up

Signed-off-by: Nemanja Zeljkovic <nocturo@gmail.com>
This commit is contained in:
Nemanja Zeljkovic 2025-07-17 10:23:45 +02:00
parent abc98d7deb
commit 23cb164708
5 changed files with 77 additions and 812 deletions

View File

@ -1698,7 +1698,7 @@ func (st *HelmState) WriteReleasesValues(helm helmexec.Interface, additionalValu
return []error{fmt.Errorf("reading %s: %w", f, err)}
}
if err := yaml.UnmarshalWithAppend(srcBytes, &src); err != nil {
if err := yaml.Unmarshal(srcBytes, &src); err != nil {
return []error{fmt.Errorf("unmarshalling yaml %s: %w", f, err)}
}
@ -3345,19 +3345,16 @@ func (st *HelmState) generateTemporaryReleaseValuesFiles(release *ReleaseSpec, v
return nil, fmt.Errorf("unexpected type of value: value=%v, type=%T", typedValue, typedValue)
}
for k, v := range fileValues {
mergedRaw[k] = mergeAppendValues(mergedRaw[k], v)
}
}
processor := yaml.NewAppendProcessor()
processed, err := processor.ProcessMap(mergedRaw)
if err != nil {
for k, v := range fileValues {
if err := processor.MergeWithAppend(mergedRaw, map[string]any{k: v}); err != nil {
return nil, fmt.Errorf("failed to process key+ syntax: %w", err)
}
}
}
if len(processed) > 0 {
valfile, err := createTempValuesFile(release, processed)
if len(mergedRaw) > 0 {
valfile, err := createTempValuesFile(release, mergedRaw)
if err != nil {
return nil, err
}
@ -3370,7 +3367,7 @@ func (st *HelmState) generateTemporaryReleaseValuesFiles(release *ReleaseSpec, v
_ = encoder.Close()
}()
if err := encoder.Encode(processed); err != nil {
if err := encoder.Encode(mergedRaw); err != nil {
return nil, err
}
@ -3380,22 +3377,6 @@ func (st *HelmState) generateTemporaryReleaseValuesFiles(release *ReleaseSpec, v
return generatedFiles, nil
}
// mergeAppendValues merges two values for the same key, preserving key+ keys for later processing
func mergeAppendValues(existing, incoming any) any {
if existing == nil {
return incoming
}
if em, ok := existing.(map[string]any); ok {
if im, ok := incoming.(map[string]any); ok {
for k, v := range im {
em[k] = mergeAppendValues(em[k], v)
}
return em
}
}
return incoming
}
func (st *HelmState) generateVanillaValuesFiles(release *ReleaseSpec) ([]string, error) {
values := []any{}
inlineValues := []any{}
@ -4008,7 +3989,7 @@ func (st *HelmState) LoadYAMLForEmbedding(release *ReleaseSpec, entries []any, m
return nil, fmt.Errorf("failed to render values files \"%s\": %v", t, err)
}
if err := yaml.UnmarshalWithAppend(yamlBytes, &values); err != nil {
if err := yaml.Unmarshal(yamlBytes, &values); err != nil {
return nil, err
}

View File

@ -4571,245 +4571,3 @@ func TestPrepareSyncReleases_ValueControlReleaseOverride(t *testing.T) {
require.Equal(t, tt.flags, r.flags, "Wrong value control flag for release %s", r.release.Name)
}
}
func TestMergeAppendValues(t *testing.T) {
tests := []struct {
name string
existing any
incoming any
expected any
}{
{
name: "nil existing returns incoming",
existing: nil,
incoming: "test-value",
expected: "test-value",
},
{
name: "nil existing with map incoming",
existing: nil,
incoming: map[string]any{"key": "value"},
expected: map[string]any{"key": "value"},
},
{
name: "scalar existing with scalar incoming",
existing: "old-value",
incoming: "new-value",
expected: "new-value",
},
{
name: "scalar existing with map incoming",
existing: "old-value",
incoming: map[string]any{"key": "value"},
expected: map[string]any{"key": "value"},
},
{
name: "map existing with scalar incoming",
existing: map[string]any{"key": "value"},
incoming: "new-value",
expected: "new-value",
},
{
name: "map existing with map incoming - simple merge",
existing: map[string]any{
"key1": "value1",
"key2": "value2",
},
incoming: map[string]any{
"key2": "new-value2",
"key3": "value3",
},
expected: map[string]any{
"key1": "value1",
"key2": "new-value2",
"key3": "value3",
},
},
{
name: "map existing with map incoming - nested merge",
existing: map[string]any{
"level1": map[string]any{
"level2": map[string]any{
"key1": "value1",
"key2": "value2",
},
},
},
incoming: map[string]any{
"level1": map[string]any{
"level2": map[string]any{
"key2": "new-value2",
"key3": "value3",
},
},
},
expected: map[string]any{
"level1": map[string]any{
"level2": map[string]any{
"key1": "value1",
"key2": "new-value2",
"key3": "value3",
},
},
},
},
{
name: "map existing with map incoming - deep nested merge",
existing: map[string]any{
"a": map[string]any{
"b": map[string]any{
"c": map[string]any{
"d": "value-d",
"e": "value-e",
},
},
},
},
incoming: map[string]any{
"a": map[string]any{
"b": map[string]any{
"c": map[string]any{
"e": "new-value-e",
"f": "value-f",
},
},
},
},
expected: map[string]any{
"a": map[string]any{
"b": map[string]any{
"c": map[string]any{
"d": "value-d",
"e": "new-value-e",
"f": "value-f",
},
},
},
},
},
{
name: "map existing with map incoming - mixed types",
existing: map[string]any{
"string": "old-string",
"int": 42,
"bool": true,
"slice": []any{"item1", "item2"},
"nested": map[string]any{
"key": "nested-value",
},
},
incoming: map[string]any{
"string": "new-string",
"int": 100,
"bool": false,
"slice": []any{"item3", "item4"},
"nested": map[string]any{
"new-key": "new-nested-value",
},
},
expected: map[string]any{
"string": "new-string",
"int": 100,
"bool": false,
"slice": []any{"item3", "item4"},
"nested": map[string]any{
"key": "nested-value",
"new-key": "new-nested-value",
},
},
},
{
name: "map existing with map incoming - empty maps",
existing: map[string]any{},
incoming: map[string]any{},
expected: map[string]any{},
},
{
name: "map existing with map incoming - empty existing",
existing: map[string]any{},
incoming: map[string]any{
"key1": "value1",
"key2": "value2",
},
expected: map[string]any{
"key1": "value1",
"key2": "value2",
},
},
{
name: "map existing with map incoming - empty incoming",
existing: map[string]any{
"key1": "value1",
"key2": "value2",
},
incoming: map[string]any{},
expected: map[string]any{
"key1": "value1",
"key2": "value2",
},
},
{
name: "map existing with map incoming - complex nested structure",
existing: map[string]any{
"config": map[string]any{
"database": map[string]any{
"host": "localhost",
"port": 5432,
"credentials": map[string]any{
"username": "admin",
"password": "secret",
},
},
"cache": map[string]any{
"enabled": true,
"ttl": 300,
},
},
},
incoming: map[string]any{
"config": map[string]any{
"database": map[string]any{
"host": "production-db",
"credentials": map[string]any{
"password": "new-secret",
"ssl": true,
},
},
"cache": map[string]any{
"ttl": 600,
},
"logging": map[string]any{
"level": "info",
},
},
},
expected: map[string]any{
"config": map[string]any{
"database": map[string]any{
"host": "production-db",
"port": 5432,
"credentials": map[string]any{
"username": "admin",
"password": "new-secret",
"ssl": true,
},
},
"cache": map[string]any{
"enabled": true,
"ttl": 600,
},
"logging": map[string]any{
"level": "info",
},
},
},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
result := mergeAppendValues(tt.existing, tt.incoming)
assert.Equal(t, tt.expected, result)
})
}
}

View File

@ -1,7 +1,6 @@
package yaml
import (
"fmt"
"strings"
)
@ -11,84 +10,6 @@ func NewAppendProcessor() *AppendProcessor {
return &AppendProcessor{}
}
func (ap *AppendProcessor) ProcessMap(data map[string]any) (map[string]any, error) {
result := make(map[string]any)
// First pass: collect all append keys and their base keys
appendKeys := make(map[string][]any)
baseKeys := make(map[string]any)
for key, value := range data {
if IsAppendKey(key) {
baseKey := GetBaseKey(key)
appendKeys[baseKey] = append(appendKeys[baseKey], value)
} else {
baseKeys[key] = value
}
}
// Second pass: process all values recursively
for key, value := range baseKeys {
processedValue, err := ap.processValue(value)
if err != nil {
return nil, fmt.Errorf("failed to process value for key %s: %w", key, err)
}
result[key] = processedValue
}
// Third pass: merge append keys with their base keys
for baseKey, appendValues := range appendKeys {
for _, appendValue := range appendValues {
processedValue, err := ap.processValue(appendValue)
if err != nil {
return nil, fmt.Errorf("failed to process append value for key %s: %w", baseKey, err)
}
if existingValue, exists := result[baseKey]; exists {
if isSlice(processedValue) && isSlice(existingValue) {
// Always append to the base key's slice
result[baseKey] = append(existingValue.([]any), processedValue.([]any)...)
} else {
// If not both slices, overwrite (fallback)
result[baseKey] = processedValue
}
} else {
result[baseKey] = processedValue
}
}
}
return result, nil
}
func (ap *AppendProcessor) processValue(value any) (any, error) {
switch v := value.(type) {
case map[string]any:
return ap.ProcessMap(v)
case map[any]any:
converted := make(map[string]any)
for k, val := range v {
if strKey, ok := k.(string); ok {
converted[strKey] = val
} else {
return nil, fmt.Errorf("non-string key in map: %v", k)
}
}
return ap.ProcessMap(converted)
case []any:
result := make([]any, len(v))
for i, elem := range v {
processed, err := ap.processValue(elem)
if err != nil {
return nil, fmt.Errorf("failed to process slice element %d: %w", i, err)
}
result[i] = processed
}
return result, nil
default:
return value, nil
}
}
func (ap *AppendProcessor) MergeWithAppend(dest, src map[string]any) error {
convertToStringMapInPlace(dest)
convertToStringMapInPlace(src)

View File

@ -7,249 +7,115 @@ import (
"github.com/stretchr/testify/require"
)
func TestAppendProcessor_ProcessMap(t *testing.T) {
tests := []struct {
name string
input map[string]any
expected map[string]any
wantErr bool
}{
{
name: "simple append to list",
input: map[string]any{
"values+": []any{"new-value"},
},
expected: map[string]any{
"values": []any{"new-value"},
},
},
{
name: "nested append",
input: map[string]any{
"config": map[string]any{
"items+": []any{"item1", "item2"},
},
},
expected: map[string]any{
"config": map[string]any{
"items": []any{"item1", "item2"},
},
},
},
{
name: "mixed regular and append keys",
input: map[string]any{
"name": "test",
"values+": []any{"value1"},
"config": map[string]any{
"enabled": true,
"items+": []any{"item1"},
},
},
expected: map[string]any{
"name": "test",
"values": []any{"value1"},
"config": map[string]any{
"enabled": true,
"items": []any{"item1"},
},
},
},
{
name: "non-list append value",
input: map[string]any{
"key+": "value",
},
expected: map[string]any{
"key": "value",
},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
processor := NewAppendProcessor()
result, err := processor.ProcessMap(tt.input)
if tt.wantErr {
assert.Error(t, err)
return
}
require.NoError(t, err)
assert.Equal(t, tt.expected, result)
})
}
}
func TestAppendProcessor_MergeWithAppend(t *testing.T) {
processor := NewAppendProcessor()
tests := []struct {
name string
dest map[string]any
src map[string]any
expected map[string]any
wantErr bool
}{
{
name: "append to existing list",
name: "append to existing slice",
dest: map[string]any{
"values": []any{"existing"},
"values": []any{
map[string]any{
"key": "a",
},
map[string]any{
"key": "b",
},
},
},
src: map[string]any{
"values+": []any{"new"},
"values+": []any{
map[string]any{
"key": "c",
},
},
},
expected: map[string]any{
"values": []any{"existing", "new"},
"values": []any{
map[string]any{
"key": "a",
},
map[string]any{
"key": "b",
},
map[string]any{
"key": "c",
},
},
{
name: "append to non-existent list",
dest: map[string]any{
"other": "value",
},
src: map[string]any{
"values+": []any{"new"},
},
expected: map[string]any{
"other": "value",
"values": []any{"new"},
},
},
{
name: "nested append",
dest: map[string]any{
"config": map[string]any{
"items": []any{"existing"},
"values": []any{
map[string]any{
"key": "a",
},
},
},
},
src: map[string]any{
"config": map[string]any{
"items+": []any{"new"},
"values+": []any{
map[string]any{
"key": "b",
},
},
},
},
expected: map[string]any{
"config": map[string]any{
"items": []any{"existing", "new"},
"values": []any{
map[string]any{
"key": "a",
},
map[string]any{
"key": "b",
},
},
{
name: "scalar with key+ treated as regular key (replace)",
dest: map[string]any{
"replicas": 2,
},
src: map[string]any{
"replicas+": 1,
},
expected: map[string]any{
"replicas": 1,
},
},
{
name: "map with key+ treated as regular key (replace)",
dest: map[string]any{
"resources": map[string]any{
"limits": map[string]any{
"memory": "256Mi",
"cpu": "200m",
},
},
},
src: map[string]any{
"resources+": map[string]any{
"requests": map[string]any{
"memory": "128Mi",
"cpu": "100m",
},
},
},
expected: map[string]any{
"resources": map[string]any{
"requests": map[string]any{
"memory": "128Mi",
"cpu": "100m",
},
},
},
},
{
name: "complex nested merge with key+ syntax for lists only",
dest: map[string]any{
"replicas": 2,
"resources": map[string]any{
"limits": map[string]any{
"memory": "256Mi",
"cpu": "200m",
},
"requests": map[string]any{
"memory": "128Mi",
"cpu": "100m",
},
},
"service": map[string]any{
"type": "ClusterIP",
"port": 80,
},
"kube-state-metrics": map[string]any{
"prometheus": map[string]any{
"metricsRelabel": []any{
map[string]any{"action": "drop"},
},
},
},
},
name: "create new slice if not exists",
dest: map[string]any{},
src: map[string]any{
"replicas+": 1,
"resources+": map[string]any{
"limits": map[string]any{
"memory": "512Mi",
"cpu": "500m",
},
"requests": map[string]any{
"memory": "256Mi",
"cpu": "250m",
},
},
"service+": map[string]any{
"type": "LoadBalancer",
"port": 443,
"annotations": map[string]any{
"service.beta.kubernetes.io/aws-load-balancer-type": "nlb",
},
},
"kube-state-metrics": map[string]any{
"prometheus": map[string]any{
"metricsRelabel+": []any{
map[string]any{"action": "keep"},
},
"values+": []any{
map[string]any{
"key": "a",
},
},
},
expected: map[string]any{
"replicas": 1,
"resources": map[string]any{
"limits": map[string]any{
"memory": "512Mi",
"cpu": "500m",
},
"requests": map[string]any{
"memory": "256Mi",
"cpu": "250m",
"values": []any{
map[string]any{
"key": "a",
},
},
"service": map[string]any{
"type": "LoadBalancer",
"port": 443,
"annotations": map[string]any{
"service.beta.kubernetes.io/aws-load-balancer-type": "nlb",
},
},
"kube-state-metrics": map[string]any{
"prometheus": map[string]any{
"metricsRelabel": []any{
map[string]any{"action": "drop"},
map[string]any{"action": "keep"},
{
name: "overwrite non-slice with append",
dest: map[string]any{
"values": "not a slice",
},
src: map[string]any{
"values+": []any{
map[string]any{
"key": "a",
},
},
},
expected: map[string]any{
"values": []any{
map[string]any{
"key": "a",
},
},
},
@ -258,123 +124,13 @@ func TestAppendProcessor_MergeWithAppend(t *testing.T) {
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
processor := NewAppendProcessor()
err := processor.MergeWithAppend(tt.dest, tt.src)
if tt.wantErr {
assert.Error(t, err)
return
}
require.NoError(t, err)
assert.Equal(t, tt.expected, tt.dest)
})
}
}
func TestUnmarshalWithAppend(t *testing.T) {
tests := []struct {
name string
yamlData string
expected map[string]any
wantErr bool
}{
{
name: "simple append syntax",
yamlData: `
values+:
- item1
- item2
name: test
`,
expected: map[string]any{
"values": []any{"item1", "item2"},
"name": "test",
},
},
{
name: "nested append syntax",
yamlData: `
config:
items+:
- existing
- new
enabled: true
`,
expected: map[string]any{
"config": map[string]any{
"items": []any{"existing", "new"},
"enabled": true,
},
},
},
{
name: "complex values file with key+ syntax",
yamlData: `
replicas+: 1
resources+:
limits:
memory: 512Mi
cpu: 500m
requests:
memory: 256Mi
cpu: 250m
service+:
type: LoadBalancer
port: 443
annotations:
service.beta.kubernetes.io/aws-load-balancer-type: nlb
kube-state-metrics:
prometheus:
metricsRelabel+:
- action: keep
`,
expected: map[string]any{
"replicas": 1,
"resources": map[string]any{
"limits": map[string]any{
"memory": "512Mi",
"cpu": "500m",
},
"requests": map[string]any{
"memory": "256Mi",
"cpu": "250m",
},
},
"service": map[string]any{
"type": "LoadBalancer",
"port": 443,
"annotations": map[string]any{
"service.beta.kubernetes.io/aws-load-balancer-type": "nlb",
},
},
"kube-state-metrics": map[string]any{
"prometheus": map[string]any{
"metricsRelabel": []any{
map[string]any{"action": "keep"},
},
},
},
},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
var result map[string]any
err := UnmarshalWithAppend([]byte(tt.yamlData), &result)
if tt.wantErr {
assert.Error(t, err)
return
}
require.NoError(t, err)
assert.Equal(t, tt.expected, result)
})
}
}
func TestIsAppendKey(t *testing.T) {
tests := []struct {
key string
@ -414,81 +170,3 @@ func TestGetBaseKey(t *testing.T) {
})
}
}
func TestAppendProcessor_ErrorCases(t *testing.T) {
tests := []struct {
name string
input map[string]any
wantErr bool
}{
{
name: "invalid map with non-string key",
input: map[string]any{
"valid": map[any]any{
123: "invalid", // non-string key
},
},
wantErr: true,
},
{
name: "valid map with string keys",
input: map[string]any{
"valid": map[string]any{
"key": "value",
},
},
wantErr: false,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
processor := NewAppendProcessor()
_, err := processor.ProcessMap(tt.input)
if tt.wantErr {
assert.Error(t, err)
} else {
assert.NoError(t, err)
}
})
}
}
func TestUnmarshalWithAppend_ErrorCases(t *testing.T) {
tests := []struct {
name string
yamlData string
wantErr bool
}{
{
name: "invalid YAML",
yamlData: `
invalid: yaml: content
- missing: proper: structure
`,
wantErr: true,
},
{
name: "valid YAML with key+",
yamlData: `
valid: true
key+: value
`,
wantErr: false,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
var result map[string]any
err := UnmarshalWithAppend([]byte(tt.yamlData), &result)
if tt.wantErr {
assert.Error(t, err)
} else {
assert.NoError(t, err)
}
})
}
}

View File

@ -63,76 +63,3 @@ func Unmarshal(data []byte, v any) error {
return v2.Unmarshal(data, v)
}
// UnmarshalWithAppend unmarshals YAML data with support for key+ syntax
// This function first unmarshals the YAML normally, then processes any key+ syntax
func UnmarshalWithAppend(data []byte, v any) error {
var rawData map[string]any
if err := Unmarshal(data, &rawData); err != nil {
return err
}
processor := NewAppendProcessor()
processedData, err := processor.ProcessMap(rawData)
if err != nil {
return err
}
processedYAML, err := Marshal(processedData)
if err != nil {
return err
}
return Unmarshal(processedYAML, v)
}
// NewDecoderWithAppend creates and returns a function that is used to decode a YAML document
// with support for key+ syntax for appending values to lists
func NewDecoderWithAppend(data []byte, strict bool) func(any) error {
if runtime.GoYamlV3 {
decoder := v3.NewDecoder(bytes.NewReader(data))
decoder.KnownFields(strict)
return func(v any) error {
var rawData map[string]any
if err := decoder.Decode(&rawData); err != nil {
return err
}
processor := NewAppendProcessor()
processedData, err := processor.ProcessMap(rawData)
if err != nil {
return err
}
processedYAML, err := Marshal(processedData)
if err != nil {
return err
}
return v3.Unmarshal(processedYAML, v)
}
}
decoder := v2.NewDecoder(bytes.NewReader(data))
decoder.SetStrict(strict)
return func(v any) error {
var rawData map[string]any
if err := decoder.Decode(&rawData); err != nil {
return err
}
processor := NewAppendProcessor()
processedData, err := processor.ProcessMap(rawData)
if err != nil {
return err
}
processedYAML, err := Marshal(processedData)
if err != nil {
return err
}
return v2.Unmarshal(processedYAML, v)
}
}