feat(pkg/policy): improve TopKeys function handling (#1730)
Signed-off-by: yxxhero <aiopsclub@163.com>
parent 349c471035
commit f3c49ae53a
@@ -8,6 +8,7 @@ import (
 	"regexp"
 	"slices"
 	"strings"
+	"unicode"
 
 	"github.com/helmfile/helmfile/pkg/runtime"
 )
@@ -83,13 +84,15 @@ func TopKeys(helmfileContent []byte, hasSeparator bool) []string {
 	clines := bytes.Split(helmfileContent, []byte("\n"))
 
 	for _, line := range clines {
-		lineStr := strings.TrimSpace(string(line))
+		lineStr := strings.TrimRightFunc(string(line), unicode.IsSpace)
 		if lineStr == "" {
 			continue // Skip empty lines
 		}
 		if hasSeparator && separatorRegex.MatchString(lineStr) {
 			topKeys = append(topKeys, lineStr)
-		} else if topConfigKeysRegex.MatchString(lineStr) {
+		}
+
+		if topConfigKeysRegex.MatchString(lineStr) {
 			topKey := strings.SplitN(lineStr, ":", 2)[0]
 			topKeys = append(topKeys, topKey)
 		}
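The key change above swaps strings.TrimSpace for strings.TrimRightFunc with unicode.IsSpace, so leading indentation survives and a nested key no longer looks identical to a top-level one. Below is a minimal, self-contained sketch of that difference; topKeysRegex is a hypothetical stand-in for the package's topConfigKeysRegex, assumed to match keys anchored at column 0.

// Illustrative sketch only: topKeysRegex is a stand-in, not the regex
// defined in pkg/policy. It is assumed to match keys at column 0.
package main

import (
	"fmt"
	"regexp"
	"strings"
	"unicode"
)

var topKeysRegex = regexp.MustCompile(`^[a-zA-Z]+:`)

func main() {
	line := "  releases:   " // nested key with trailing spaces

	trimmed := strings.TrimSpace(line)                           // "releases:" - indentation lost
	rightTrimmed := strings.TrimRightFunc(line, unicode.IsSpace) // "  releases:" - indentation kept

	fmt.Println(topKeysRegex.MatchString(trimmed))      // true: wrongly reported as top-level
	fmt.Println(topKeysRegex.MatchString(rightTrimmed)) // false: nested key is skipped
}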
@@ -230,6 +230,11 @@ func TestTopKeys(t *testing.T) {
 			helmfileContent: []byte(""),
 			want:            nil,
 		},
+		{
+			name:            "sub level contains top level key",
+			helmfileContent: []byte("bases:\n releases:\n - name: test\n namespace: test\n"),
+			want:            []string{"bases"},
+		},
 	}
 
 	for _, tt := range tests {
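The added test case exercises exactly this behaviour: only the key at column 0 is reported. A hedged usage sketch follows, assuming the function is importable as github.com/helmfile/helmfile/pkg/policy with package name policy (inferred from the commit scope and the existing pkg/runtime import; not confirmed by this diff).

// Sketch under assumptions noted above; TopKeys' signature is taken from the hunk header.
package main

import (
	"fmt"

	"github.com/helmfile/helmfile/pkg/policy"
)

func main() {
	content := []byte("bases:\n releases:\n - name: test\n namespace: test\n")

	// hasSeparator=false: no document separators expected, only top-level keys returned.
	keys := policy.TopKeys(content, false)
	fmt.Println(keys) // expected, per the new test case: [bases]
}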