Adds feature to fetch release values and secret values from remote (#47)

* Adds feature to fetch environment values from remote

The releases and environments sections allow values files to be loaded
from the local disk.
This enhancement makes it possible to reference remote (go-getter)
files, which are fetched and cached before being used.
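
For illustration, these are the go-getter style sources exercised by
the new tests in this change; the SSH key in the second source is
shortened to a placeholder here:

```go
// Illustrative only: go-getter style values sources taken from the tests below.
const (
	// HTTPS git source: <repo>.git@<path inside the repo>?ref=<git ref>
	httpsValuesURL = "git::https://github.com/helm/helm.git@cmd/helm/testdata/output/values.yaml?ref=v3.8.0"

	// SSH git source with a base64-encoded key passed as the sshkey query
	// parameter (shortened here); this is the value that gets redacted in
	// the cache directory name.
	sshValuesURL = "git::ssh://git@github.com/cloudposse/helmfiles.git@releases/kiam.yaml?ref=0.40.0&sshkey=<base64-encoded-key>"
)
```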

In addition, when fetching a remote git source with an SSH key, the SSH
key is not included in the cache folder name. This avoids two problems:
1. Sensitive information is not leaked in the name of the cache folder.
2. Base64-encoded SSH keys are very long. On some file systems the
   maximum directory name length is exceeded when the full base64
   string is used in the path name.

The sshkey information is redacted to a fixed string. Because of this
fixed string there is a chance of colliding cache names, but the
likelihood of such a collision is very low: the git repo and the git
reference have to be the same while only the sshkey differs. In that
case the same source is checked out and referenced anyway.
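
A minimal, self-contained sketch of the cache-key derivation, assuming
the source directory and the query string have already been split out
of the go-getter URL; the buildCacheKey name is hypothetical, while the
replacer and the redaction mirror the change to Remote.Fetch shown
below:

```go
package main

import (
	"fmt"
	neturl "net/url"
	"strings"
)

// buildCacheKey is a hypothetical helper that mirrors the cache-key logic
// in Remote.Fetch: the sshkey query parameter is replaced by the fixed
// string "redacted" so the key never appears in the cache directory name.
func buildCacheKey(srcDir, query string) string {
	replacer := strings.NewReplacer(":", "", "//", "_", "/", "_", ".", "_")
	dirKey := replacer.Replace(srcDir)
	if len(query) == 0 {
		return dirKey
	}
	q, _ := neturl.ParseQuery(query)
	if q.Has("sshkey") {
		q.Set("sshkey", "redacted")
	}
	paramsKey := strings.Replace(q.Encode(), "&", "_", -1)
	return fmt.Sprintf("%s.%s", dirKey, paramsKey)
}

func main() {
	// Two different (sample) keys for the same repo and ref collide on
	// purpose: the checked-out source is identical, so reusing the cache
	// is safe.
	for _, key := range []string{"a2V5LW9uZQ==", "a2V5LXR3bw=="} {
		fmt.Println(buildCacheKey(
			"ssh://github.com/cloudposse/helmfiles.git",
			"ref=0.40.0&sshkey="+key,
		))
	}
	// Both iterations print:
	// ssh_github_com_cloudposse_helmfiles_git.ref=0.40.0_sshkey=redacted
}
```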

Signed-off-by: Lüchinger Dominic <dev@snowgarden.ch>

* Update pkg/state/storage.go

Co-authored-by: Yusuke Kuoka <ykuoka@gmail.com>
Dominic 2022-06-05 08:08:38 +02:00 committed by GitHub
parent b6f3972381
commit 789af92c09
4 changed files with 110 additions and 4 deletions


@@ -4,6 +4,7 @@ import (
"context"
"encoding/json"
"fmt"
neturl "net/url"
"os"
"path/filepath"
"strings"
@@ -186,7 +187,11 @@ func (r *Remote) Fetch(goGetterSrc string, cacheDirOpt ...string) (string, error
replacer := strings.NewReplacer(":", "", "//", "_", "/", "_", ".", "_")
dirKey := replacer.Replace(srcDir)
if len(query) > 0 {
paramsKey := strings.Replace(query, "&", "_", -1)
q, _ := neturl.ParseQuery(query)
if q.Has("sshkey") {
q.Set("sshkey", "redacted")
}
paramsKey := strings.Replace(q.Encode(), "&", "_", -1)
cacheKey = fmt.Sprintf("%s.%s", dirKey, paramsKey)
} else {
cacheKey = dirKey


@@ -161,6 +161,78 @@ func TestRemote_SShGitHub(t *testing.T) {
}
}
func TestRemote_SShGitHub_WithSshKey(t *testing.T) {
cleanfs := map[string]string{
CacheDir(): "",
}
cachefs := map[string]string{
filepath.Join(CacheDir(), "ssh_github_com_cloudposse_helmfiles_git.ref=0.40.0_sshkey=redacted/releases/kiam.yaml"): "foo: bar",
}
type testcase struct {
files map[string]string
expectCacheHit bool
}
testcases := []testcase{
{files: cleanfs, expectCacheHit: false},
{files: cachefs, expectCacheHit: true},
}
for i := range testcases {
testcase := testcases[i]
t.Run(fmt.Sprintf("case %d", i), func(t *testing.T) {
testfs := testhelper.NewTestFs(testcase.files)
hit := true
get := func(wd, src, dst string) error {
if wd != CacheDir() {
return fmt.Errorf("unexpected wd: %s", wd)
}
if src != "git::ssh://git@github.com/cloudposse/helmfiles.git?ref=0.40.0&sshkey=ZWNkc2Etc2hhMi1uaXN0cDI1NiBBQUFBRTJWalpITmhMWE5vWVRJdGJtbHpkSEF5TlRZQUFBQUlibWx6ZEhBeU5UWUFBQUJCQkJTU3dOY2xoVzQ2Vm9VR3dMQ3JscVRHYUdOVWdRVUVEUEptc1ZzdUViL2RBNUcrQk9YMWxGaUVMYU9HQ2F6bS9KQkR2V3Y2Y0ZDQUtVRjVocVJOUjdJPSA=" {
return fmt.Errorf("unexpected src: %s", src)
}
hit = false
return nil
}
getter := &testGetter{
get: get,
}
remote := &Remote{
Logger: helmexec.NewLogger(os.Stderr, "debug"),
Home: CacheDir(),
Getter: getter,
ReadFile: testfs.ReadFile,
FileExists: testfs.FileExistsAt,
DirExists: testfs.DirectoryExistsAt,
}
url := "git::ssh://git@github.com/cloudposse/helmfiles.git@releases/kiam.yaml?ref=0.40.0&sshkey=ZWNkc2Etc2hhMi1uaXN0cDI1NiBBQUFBRTJWalpITmhMWE5vWVRJdGJtbHpkSEF5TlRZQUFBQUlibWx6ZEhBeU5UWUFBQUJCQkJTU3dOY2xoVzQ2Vm9VR3dMQ3JscVRHYUdOVWdRVUVEUEptc1ZzdUViL2RBNUcrQk9YMWxGaUVMYU9HQ2F6bS9KQkR2V3Y2Y0ZDQUtVRjVocVJOUjdJPSA="
file, err := remote.Locate(url)
if err != nil {
t.Fatalf("unexpected error: %v", err)
}
expectedFile := filepath.Join(CacheDir(), "ssh_github_com_cloudposse_helmfiles_git.ref=0.40.0_sshkey=redacted/releases/kiam.yaml")
if file != expectedFile {
t.Errorf("unexpected file located: %s vs expected: %s", file, expectedFile)
}
if testcase.expectCacheHit && !hit {
t.Errorf("unexpected result: unexpected cache miss")
}
if !testcase.expectCacheHit && hit {
t.Errorf("unexpected result: unexpected cache hit")
}
})
}
}
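
The test above relies on a testGetter stub defined elsewhere in the
package; a minimal sketch of such a stub, assuming the getter interface
is a single Get(wd, src, dst) method:

```go
// Hypothetical sketch of the stub getter used in the test above; the real
// testGetter lives elsewhere in this package. It delegates to the injected
// function so each case can detect whether a fetch (cache miss) happened.
type testGetter struct {
	get func(wd, src, dst string) error
}

func (g *testGetter) Get(wd, src, dst string) error {
	return g.get(wd, src, dst)
}
```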
func TestParse(t *testing.T) {
type testcase struct {
input string


@@ -49,6 +49,24 @@ func TestEnvValsLoad_SingleValuesFile(t *testing.T) {
}
}
// Fetch Environment values from remote
func TestEnvValsLoad_SingleValuesFileRemote(t *testing.T) {
l := newLoader()
actual, err := l.LoadEnvironmentValues(nil, []interface{}{"git::https://github.com/helm/helm.git@cmd/helm/testdata/output/values.yaml?ref=v3.8.0"}, nil)
if err != nil {
t.Fatal(err)
}
expected := map[string]interface{}{
"name": string("value"),
}
if diff := cmp.Diff(expected, actual); diff != "" {
t.Errorf(diff)
}
}
// See https://github.com/roboll/helmfile/issues/1150
func TestEnvValsLoad_OverwriteNilValue_Issue1150(t *testing.T) {
l := newLoader()


@@ -2,11 +2,11 @@ package state
import (
"fmt"
"github.com/helmfile/helmfile/pkg/remote"
"go.uber.org/zap"
"net/url"
"path/filepath"
"sort"
"go.uber.org/zap"
)
type Storage struct {
@@ -14,6 +14,7 @@ type Storage struct {
FilePath string
readFile func(string) ([]byte, error)
basePath string
glob func(string) ([]string, error)
}
@@ -30,7 +31,17 @@ func NewStorage(forFile string, logger *zap.SugaredLogger, glob func(string) ([]
func (st *Storage) resolveFile(missingFileHandler *string, tpe, path string) ([]string, bool, error) {
title := fmt.Sprintf("%s file", tpe)
files, err := st.ExpandPaths(path)
var files []string
var err error
if remote.IsRemote(path) {
r := remote.NewRemote(st.logger, "", st.readFile, directoryExistsAt, fileExistsAt)
fetchedDir, _ := r.Fetch(path, "values")
files = []string{fetchedDir}
} else {
files, err = st.ExpandPaths(path)
}
if err != nil {
return nil, false, err
}
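
A self-contained sketch of the same dispatch, assuming remote.IsRemote
reports go-getter style paths and that a Fetch error should surface to
the caller; the helper name and its parameters are illustrative, and
the exists-check signatures are assumed from how they are passed to
NewRemote in the hunk above:

```go
package state

import (
	"go.uber.org/zap"

	"github.com/helmfile/helmfile/pkg/remote"
)

// resolvePathsSketch (hypothetical) mirrors the branch added to resolveFile
// above: remote go-getter sources are fetched into the "values" cache area
// and resolved to their local copy; anything else goes through glob
// expansion as before.
func resolvePathsSketch(
	logger *zap.SugaredLogger,
	readFile func(string) ([]byte, error),
	dirExists, fileExists func(string) bool,
	expandPaths func(string) ([]string, error),
	path string,
) ([]string, error) {
	if remote.IsRemote(path) {
		r := remote.NewRemote(logger, "", readFile, dirExists, fileExists)
		fetched, err := r.Fetch(path, "values")
		if err != nil {
			return nil, err
		}
		return []string{fetched}, nil
	}
	return expandPaths(path)
}
```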