helmfile/pkg/hcllang/hcl_loader.go

526 lines
16 KiB
Go
Raw Blame History

This file contains ambiguous Unicode characters

This file contains Unicode characters that might be confused with other characters. If you think that this is intentional, you can safely ignore this warning. Use the Escape button to reveal them.

package hcllang
import (
nativejson "encoding/json"
"fmt"
"slices"
"strings"
"dario.cat/mergo"
"github.com/hashicorp/hcl/v2"
"github.com/hashicorp/hcl/v2/hclparse"
"github.com/hashicorp/hcl/v2/hclsyntax"
"github.com/variantdev/dag/pkg/dag"
"github.com/zclconf/go-cty/cty"
"github.com/zclconf/go-cty/cty/json"
"go.uber.org/zap"
"github.com/helmfile/helmfile/pkg/filesystem"
)
const (
	// badIdentifierDetail is the diagnostic detail emitted when a block
	// attribute name is not a valid HCL identifier.
	badIdentifierDetail = "A name must start with a letter or underscore and may contain only letters, digits, underscores, and dashes."
	// ValuesBlockIdentifier is the top-level HCL block type holding helmfile values.
	ValuesBlockIdentifier = "values"
	// LocalsBlockIdentifier is the top-level HCL block type holding per-file locals.
	LocalsBlockIdentifier = "locals"
	// valuesAccessorPrefix is the root name under which evaluated values are
	// exposed to expressions (e.g. hv.myvar).
	valuesAccessorPrefix = "hv"
	// localsAccessorPrefix is the root name under which evaluated locals are
	// exposed to expressions (e.g. local.myvar).
	localsAccessorPrefix = "local"
)
// ctyMergeValues returns a new cty.Value that is the deep merge of a and b.
// a is the "base", b is the "override": on conflicts the override wins,
// except that objects/maps of matching kind are merged per key recursively.
func ctyMergeValues(a, b cty.Value) cty.Value {
	// A null override keeps the base value untouched.
	if b.IsNull() {
		return a
	}
	// An unknown (but non-null) override propagates, so downstream
	// evaluation can surface the unknown instead of silently keeping a.
	if !b.IsKnown() {
		return b
	}
	// With no usable base, the override wins outright.
	if a.IsNull() || !a.IsKnown() {
		return b
	}
	// Objects or maps of the same kind -> merge per key.
	if (a.Type().IsObjectType() && b.Type().IsObjectType()) ||
		(a.Type().IsMapType() && b.Type().IsMapType()) {
		mergedAttrs := make(map[string]cty.Value)
		// Start with all attrs from a.
		for name, av := range a.AsValueMap() {
			mergedAttrs[name] = av
		}
		// Overlay attrs from b, recursing where both sides define the key.
		for name, bv := range b.AsValueMap() {
			if av, ok := mergedAttrs[name]; ok {
				mergedAttrs[name] = ctyMergeValues(av, bv)
			} else {
				mergedAttrs[name] = bv
			}
		}
		if a.Type().IsMapType() {
			// cty.MapVal panics when given an empty map (e.g. when both
			// inputs are empty maps); MapValEmpty is the safe constructor.
			if len(mergedAttrs) == 0 {
				return cty.MapValEmpty(a.Type().ElementType())
			}
			return cty.MapVal(mergedAttrs)
		}
		return cty.ObjectVal(mergedAttrs)
	}
	// Tuples / lists -> replaced wholesale by the override.
	if (a.Type().IsTupleType() && b.Type().IsTupleType()) ||
		(a.Type().IsListType() && b.Type().IsListType()) {
		return b
	}
	// Anything else (numbers, strings, bools, etc.) -> righthand side wins.
	return b
}
// HelmfileHCLValue represents a single entry from a "values" or "locals" block file.
// The block itself is not represented, because it serves only to
// provide context for us to interpret its contents.
type HelmfileHCLValue struct {
	Name  string         // attribute name within the block
	Expr  hcl.Expression // unevaluated HCL expression for the attribute
	Range hcl.Range      // source range of the attribute (filename, start/end positions)
}
// HCLLoader parses a set of helmfile HCL files and renders their "values"
// blocks, resolving per-file locals and cross-file variable overrides.
type HCLLoader struct {
	hclFilesPath []string                       // ordered list of HCL file paths to load
	fs           *filesystem.FileSystem         // filesystem abstraction used to read the files
	logger       *zap.SugaredLogger             // logger for debug output
	allVariableDefs map[string][]*HelmfileHCLValue // Track all definitions for merging
}
// NewHCLLoader constructs an HCLLoader bound to the given filesystem and logger.
func NewHCLLoader(fs *filesystem.FileSystem, logger *zap.SugaredLogger) *HCLLoader {
	loader := &HCLLoader{}
	loader.fs = fs
	loader.logger = logger
	return loader
}
// AddFile registers a single HCL file path for later rendering.
func (hl *HCLLoader) AddFile(file string) {
	hl.AddFiles([]string{file})
}
// AddFiles registers multiple HCL file paths for later rendering,
// preserving their order.
func (hl *HCLLoader) AddFiles(files []string) {
	for _, f := range files {
		hl.hclFilesPath = append(hl.hclFilesPath, f)
	}
}
// Length reports how many HCL file paths have been registered.
func (hl *HCLLoader) Length() int {
	count := len(hl.hclFilesPath)
	return count
}
// HCLRender parses every registered HCL file, resolves each file's locals
// first (so values blocks can reference them), then evaluates all values
// blocks in dependency order and returns the result as native Go types.
func (hl *HCLLoader) HCLRender() (map[string]any, error) {
	if hl.Length() == 0 {
		return nil, fmt.Errorf("nothing to render")
	}
	valueDefs, localsByFile, diags := hl.readHCLs()
	if len(diags) > 0 {
		return nil, diags.Errs()[0]
	}
	// Decode all locals from all files first so they can be referenced
	// from values blocks during the second pass.
	resolvedLocals := map[string]map[string]cty.Value{}
	for file, fileLocals := range localsByFile {
		localsPlan, err := hl.createDAGGraph(fileLocals, LocalsBlockIdentifier)
		if err != nil {
			return nil, err
		}
		decoded, err := hl.decodeGraph(localsPlan, LocalsBlockIdentifier, fileLocals, nil)
		if err != nil {
			return nil, err
		}
		resolvedLocals[file] = map[string]cty.Value{
			localsAccessorPrefix: decoded[localsAccessorPrefix],
		}
	}
	// Decode the values blocks with the per-file locals in scope.
	valuesPlan, err := hl.createDAGGraph(valueDefs, ValuesBlockIdentifier)
	if err != nil {
		return nil, err
	}
	valuesCty, err := hl.decodeGraph(valuesPlan, ValuesBlockIdentifier, valueDefs, resolvedLocals)
	if err != nil {
		return nil, err
	}
	return hl.convertToGo(valuesCty)
}
// collectExprDeps appends to deps the unique dependency names referenced by
// expr, skipping references that parseSingleAttrRef resolves to "" (e.g.
// locals references inside values blocks, which are precomputed and need no
// graph edge).
func (hl *HCLLoader) collectExprDeps(deps []string, expr hcl.Expression, blockType string) ([]string, error) {
	for _, tr := range expr.Variables() {
		attr, diags := hl.parseSingleAttrRef(tr, blockType)
		if diags != nil {
			return nil, fmt.Errorf("%s", diags.Errs()[0])
		}
		if attr != "" && !slices.Contains(deps, attr) {
			deps = append(deps, attr)
		}
	}
	return deps, nil
}

// createDAGGraph builds a dependency graph over the given variables and
// returns a topologically sorted plan describing the order in which they
// must be evaluated. It fails with a descriptive error when a variable
// references an undefined name or the graph cannot be planned (e.g. cycles).
func (hl *HCLLoader) createDAGGraph(HelmfileHCLValues map[string]*HelmfileHCLValue, blockType string) (*dag.Topology, error) {
	dagGraph := dag.New()
	for _, hv := range HelmfileHCLValues {
		var traversals []string
		var err error
		// For values blocks with multiple definitions, collect dependencies
		// from ALL definitions to ensure proper evaluation order even if only
		// earlier definitions have dependencies.
		if blockType == ValuesBlockIdentifier && hl.allVariableDefs != nil && len(hl.allVariableDefs[hv.Name]) > 1 {
			for _, varDef := range hl.allVariableDefs[hv.Name] {
				if traversals, err = hl.collectExprDeps(traversals, varDef.Expr, blockType); err != nil {
					return nil, err
				}
			}
		} else {
			// Single definition, or locals / no tracking available:
			// collect traversals from the variable's own expression.
			if traversals, err = hl.collectExprDeps(traversals, hv.Expr, blockType); err != nil {
				return nil, err
			}
		}
		hl.logger.Debugf("Adding Dependency : %s => [%s]", hv.Name, strings.Join(traversals, ", "))
		dagGraph.Add(hv.Name, dag.Dependencies(traversals))
	}
	// Generate the DAG plan which provides the order in which to interpolate vars.
	plan, err := dagGraph.Plan(dag.SortOptions{
		WithDependencies: true,
	})
	if err == nil {
		return &plan, nil
	}
	if ude, ok := err.(*dag.UndefinedDependencyError); ok {
		var quotedVariableNames []string
		for _, d := range ude.Dependents {
			quotedVariableNames = append(quotedVariableNames, fmt.Sprintf("%q", d))
		}
		return nil, fmt.Errorf("variables %s depend(s) on undefined vars %q", strings.Join(quotedVariableNames, ", "), ude.UndefinedNode)
	}
	return nil, fmt.Errorf("error while building the DAG variable graph : %s", err.Error())
}
// decodeGraph evaluates every variable of the given block type in the order
// dictated by the DAG topology and returns the accumulated evaluation
// context, keyed by accessor prefix ("hv" for values, "local" for locals).
// additionalLocalContext maps a filename to that file's precomputed locals;
// it is only consulted for values blocks.
func (hl *HCLLoader) decodeGraph(dagTopology *dag.Topology, blocktype string, vars map[string]*HelmfileHCLValue, additionalLocalContext map[string]map[string]cty.Value) (map[string]cty.Value, error) {
	// values is the shared eval context: it carries the "hv"/"local" roots
	// visible to expressions evaluated in later iterations.
	values := map[string]cty.Value{}
	// helmfileHCLValuesValues accumulates each decoded variable by name.
	helmfileHCLValuesValues := map[string]cty.Value{}
	var diags hcl.Diagnostics
	hclFunctions, err := HCLFunctions(nil)
	if err != nil {
		return nil, err
	}
	// Walk the topology group by group; nodes in a group have all their
	// dependencies satisfied by earlier groups.
	for groupIndex := 0; groupIndex < len(*dagTopology); groupIndex++ {
		dagNodesInGroup := (*dagTopology)[groupIndex]
		for _, node := range dagNodesInGroup {
			v := vars[node.String()]
			// For values blocks, expose the locals of the file this variable
			// was defined in (if any) under the "local" accessor.
			if blocktype != LocalsBlockIdentifier && additionalLocalContext[v.Range.Filename] != nil {
				values[localsAccessorPrefix] = additionalLocalContext[v.Range.Filename][localsAccessorPrefix]
			}
			ctx := &hcl.EvalContext{
				Variables: values,
				Functions: hclFunctions,
			}
			// Check if this variable has multiple definitions (overrides) for values blocks
			if blocktype == ValuesBlockIdentifier && hl.allVariableDefs != nil {
				varDefs := hl.allVariableDefs[node.String()]
				if len(varDefs) > 1 {
					// Evaluate and merge all definitions in file order
					var mergedValue cty.Value
					for i, varDef := range varDefs {
						// Update local context for each file
						if additionalLocalContext[varDef.Range.Filename] != nil {
							ctx.Variables[localsAccessorPrefix] = additionalLocalContext[varDef.Range.Filename][localsAccessorPrefix]
						} else {
							// Ensure locals from a previous definition/file do not leak into this evaluation
							ctx.Variables[localsAccessorPrefix] = cty.NilVal
						}
						evalValue, evalDiags := varDef.Expr.Value(ctx)
						if len(evalDiags) > 0 {
							return nil, fmt.Errorf("error when trying to evaluate variable %s at %s:%d : %s",
								varDef.Name, varDef.Range.Filename, varDef.Range.Start.Line, evalDiags.Errs()[0])
						}
						if i == 0 {
							mergedValue = evalValue
						} else {
							// Later definitions deep-merge over earlier ones.
							mergedValue = ctyMergeValues(mergedValue, evalValue)
						}
					}
					helmfileHCLValuesValues[node.String()] = mergedValue
					// Reset local context
					values[localsAccessorPrefix] = cty.NilVal
				} else {
					// Single definition, evaluate normally
					helmfileHCLValuesValues[node.String()], diags = v.Expr.Value(ctx)
					if len(diags) > 0 {
						return nil, fmt.Errorf("error when trying to evaluate variable %s : %s", v.Name, diags.Errs()[0])
					}
				}
			} else {
				// For locals or when no tracking available, evaluate normally
				helmfileHCLValuesValues[node.String()], diags = v.Expr.Value(ctx)
				if len(diags) > 0 {
					return nil, fmt.Errorf("error when trying to evaluate variable %s : %s", v.Name, diags.Errs()[0])
				}
			}
			switch blocktype {
			case ValuesBlockIdentifier:
				// Update the eval context for the next value evaluation iteration
				values[valuesAccessorPrefix] = cty.ObjectVal(helmfileHCLValuesValues)
				// Set back local to nil to avoid an unexpected behavior when the next iteration is in another file
				values[localsAccessorPrefix] = cty.NilVal
			case LocalsBlockIdentifier:
				values[localsAccessorPrefix] = cty.ObjectVal(helmfileHCLValuesValues)
			}
		}
	}
	return values, nil
}
// readHCLs reads every registered file, accumulating all "values" block
// variables (last definition wins) and collecting each file's locals
// separately, keyed by filename. It also records every variable definition
// in hl.allVariableDefs so decodeGraph can deep-merge overrides later.
func (hl *HCLLoader) readHCLs() (map[string]*HelmfileHCLValue, map[string]map[string]*HelmfileHCLValue, hcl.Diagnostics) {
	var variables map[string]*HelmfileHCLValue
	// Track all definitions for merging during evaluation
	var allVariableDefs map[string][]*HelmfileHCLValue
	var local map[string]*HelmfileHCLValue
	locals := map[string]map[string]*HelmfileHCLValue{}
	var diags hcl.Diagnostics
	for _, file := range hl.hclFilesPath {
		variables, allVariableDefs, local, diags = hl.readHCL(variables, allVariableDefs, file)
		if diags != nil {
			return nil, nil, diags
		}
		// Each file's locals are kept separate; values blocks only see the
		// locals of the file they were defined in.
		locals[file] = local
	}
	// Store all definitions in the HCLLoader for use in decodeGraph
	hl.allVariableDefs = allVariableDefs
	return variables, locals, nil
}
// readHCL parses a single HCL file and folds its "values" block into hvars
// (last definition wins via mergo override) while recording every definition
// in allDefs so decodeGraph can later deep-merge overrides in file order.
// It returns the updated hvars and allDefs, the file's locals (nil when the
// file has no locals block), and any diagnostics.
func (hl *HCLLoader) readHCL(hvars map[string]*HelmfileHCLValue, allDefs map[string][]*HelmfileHCLValue, file string) (map[string]*HelmfileHCLValue, map[string][]*HelmfileHCLValue, map[string]*HelmfileHCLValue, hcl.Diagnostics) {
	src, err := hl.fs.ReadFile(file)
	if err != nil {
		return nil, nil, nil, hcl.Diagnostics{
			{
				Severity: hcl.DiagError,
				Summary:  fmt.Sprintf("%s", err),
				Detail:   "could not read file",
				Subject:  &hcl.Range{},
			},
		}
	}
	// Parse file as HCL
	p := hclparse.NewParser()
	hclFile, diags := p.ParseHCL(src, file)
	if hclFile == nil || hclFile.Body == nil || diags != nil {
		return nil, nil, nil, diags
	}
	// Only top-level "values" and "locals" blocks are recognized.
	HelmfileHCLValuesSchema := &hcl.BodySchema{
		Blocks: []hcl.BlockHeaderSchema{
			{
				Type: ValuesBlockIdentifier,
			},
			{
				Type: LocalsBlockIdentifier,
			},
		},
	}
	// make sure content has a struct with helmfile_vars Schema defined
	content, diags := hclFile.Body.Content(HelmfileHCLValuesSchema)
	if diags != nil {
		return nil, nil, nil, diags
	}
	var helmfileLocalsVars map[string]*HelmfileHCLValue
	// Decode blocks to return HelmfileHCLValue object => (each var with expr + Name )
	// A file may contain at most one locals block.
	if len(content.Blocks.OfType(LocalsBlockIdentifier)) > 1 {
		return nil, nil, nil, hcl.Diagnostics{
			&hcl.Diagnostic{
				Severity: hcl.DiagError,
				Summary:  "A file can only support exactly 1 `locals` block",
				Subject:  &content.Blocks[0].DefRange,
			}}
	}
	for _, block := range content.Blocks {
		var helmfileBlockVars map[string]*HelmfileHCLValue
		if block.Type == ValuesBlockIdentifier {
			helmfileBlockVars, diags = hl.decodeHelmfileHCLValuesBlock(block)
			if diags != nil {
				return nil, nil, nil, diags
			}
		}
		if block.Type == LocalsBlockIdentifier {
			helmfileLocalsVars, diags = hl.decodeHelmfileHCLValuesBlock(block)
			if diags != nil {
				return nil, nil, nil, diags
			}
		}
		// Track all definitions before merging
		if allDefs == nil {
			allDefs = make(map[string][]*HelmfileHCLValue)
		}
		for k, v := range helmfileBlockVars {
			// Make a copy of v to avoid it being modified by future mergo.Merge calls
			vCopy := *v
			// If this variable already exists in hvars (from a previous file),
			// ensure the old definition is tracked before adding the new one
			if hvars != nil && hvars[k] != nil {
				// Check if hvars[k] (the old definition) is already in allDefs
				oldDefTracked := false
				for _, existing := range allDefs[k] {
					// Compare by values to see if this exact definition is already tracked
					// (Range uniquely identifies where the definition was written).
					if existing.Range == hvars[k].Range {
						oldDefTracked = true
						break
					}
				}
				if !oldDefTracked {
					// Old definition not yet tracked - add a copy of it
					oldDefCopy := *hvars[k]
					allDefs[k] = append(allDefs[k], &oldDefCopy)
				}
			}
			// Now add the new definition from this file (as a copy)
			allDefs[k] = append(allDefs[k], &vCopy)
		}
		// Allow override of variables across files - last one wins
		// The actual merge will happen in decodeGraph() where we have proper evaluation context
		err = mergo.Merge(&hvars, &helmfileBlockVars, mergo.WithOverride)
		if err != nil {
			var diags hcl.Diagnostics
			diags = append(diags, &hcl.Diagnostic{
				Severity: hcl.DiagError,
				Summary:  "Merge failed",
				Detail:   err.Error(),
				Subject:  nil,
			})
			return nil, nil, nil, diags
		}
	}
	return hvars, allDefs, helmfileLocalsVars, nil
}
// decodeHelmfileHCLValuesBlock turns every attribute of a "values" or
// "locals" block into a HelmfileHCLValue, diagnosing (but still recording)
// attributes whose names are not valid HCL identifiers.
func (hl *HCLLoader) decodeHelmfileHCLValuesBlock(block *hcl.Block) (map[string]*HelmfileHCLValue, hcl.Diagnostics) {
	attrs, diags := block.Body.JustAttributes()
	if diags != nil || len(attrs) == 0 {
		return nil, diags
	}
	decoded := make(map[string]*HelmfileHCLValue, len(attrs))
	for name, attr := range attrs {
		if !hclsyntax.ValidIdentifier(name) {
			diags = append(diags, &hcl.Diagnostic{
				Severity: hcl.DiagError,
				Summary:  "Invalid helmfile_vars variable name",
				Detail:   badIdentifierDetail,
				Subject:  &attr.NameRange,
			})
		}
		decoded[name] = &HelmfileHCLValue{
			Name:  name,
			Expr:  attr.Expr,
			Range: attr.Range,
		}
	}
	return decoded, diags
}
// parseSingleAttrRef extracts the first-level attribute name from a variable
// traversal (e.g. "hv.foo.bar" -> "foo"). It returns "" without diagnostics
// for locals references outside locals blocks, since those are precomputed
// and need no dependency edge.
func (hl *HCLLoader) parseSingleAttrRef(traversal hcl.Traversal, blockType string) (string, hcl.Diagnostics) {
	if len(traversal) == 0 {
		return "", hcl.Diagnostics{&hcl.Diagnostic{
			Severity: hcl.DiagError,
			Summary:  "An empty traversal can't be parsed",
		}}
	}
	root := traversal.RootName()
	// In `values` blocks, Locals are always precomputed, so they don't need to be in the graph
	if blockType != LocalsBlockIdentifier && root == localsAccessorPrefix {
		return "", nil
	}
	if len(traversal) >= 2 {
		if attrTrav, ok := traversal[1].(hcl.TraverseAttr); ok {
			return attrTrav.Name, nil
		}
		// Second step exists but is not an attribute access (e.g. indexing).
		return "", hcl.Diagnostics{&hcl.Diagnostic{
			Severity: hcl.DiagError,
			Summary:  "Invalid reference",
			Detail:   fmt.Sprintf("The %q object does not support this operation.", root),
			Subject:  traversal[1].SourceRange().Ptr(),
		}}
	}
	// Bare root reference like `hv` with no attribute step.
	rootRange := traversal[0].SourceRange()
	return "", hcl.Diagnostics{&hcl.Diagnostic{
		Severity: hcl.DiagError,
		Summary:  "Invalid reference",
		Detail:   fmt.Sprintf("The %q object cannot be accessed directly. Instead, access it from one of its root.", root),
		Subject:  &rootRange,
	}}
}
// convertToGo converts the evaluated "hv" cty value into native Go types.
//
// Rather than walking the cty type system by hand, it round-trips through
// JSON: cty's json.Marshal emits a {"type": ..., "value": ...} document that
// describes the mapping between the two, and we only keep the "value" part.
func (hl *HCLLoader) convertToGo(src map[string]cty.Value) (map[string]any, error) {
	b, err := json.Marshal(src[valuesAccessorPrefix], cty.DynamicPseudoType)
	if err != nil {
		return nil, fmt.Errorf("could not marshal cty value : %s", err.Error())
	}
	var jsonunm map[string]any
	if err := nativejson.Unmarshal(b, &jsonunm); err != nil {
		return nil, fmt.Errorf("could not unmarshal json : %s", err.Error())
	}
	result, ok := jsonunm["value"].(map[string]any)
	if !ok {
		return nil, fmt.Errorf("could not extract a map object from json \"value\" key")
	}
	return result, nil
}