Add new tk tool importers command
This command is powerful when used with the `--merge-strategy=replace-envs` flag added here: #760

Given a repository with a large number of Tanka environments, exporting the whole set on every commit can be very time-consuming.
What a user typically wants is to export only the environments that have changed.
To do that, we need to find out which files affect which environments, and that is what this new command does.
A user can now pass the list of files modified in a git commit and run `tk export` on the set of environments affected by those files.
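
As a minimal sketch of that workflow (the changed-file paths below are hypothetical, e.g. taken from `git diff --name-only`): on the CLI this is `tk tool importers <files...>` piped into `tk export`, and the same lookup is available programmatically through the `jsonnet.FindImporterForFiles` helper introduced in this commit:

```go
package main

import (
	"fmt"
	"log"

	"github.com/grafana/tanka/pkg/jsonnet"
)

func main() {
	// Hypothetical list of files touched by a commit.
	changed := []string{"lib/k.libsonnet", "environments/dev/main.jsonnet"}

	// Passing nil for the chain starts a fresh traversal from the repository root.
	envs, err := jsonnet.FindImporterForFiles(".", changed, nil)
	if err != nil {
		log.Fatalf("resolving importers: %v", err)
	}

	// Each result is a main.jsonnet entrypoint: these are the environments to re-export.
	for _, env := range envs {
		fmt.Println(env)
	}
}
```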
julienduchesne committed Oct 3, 2022
1 parent c5a739e commit 7fdef4d
Showing 22 changed files with 445 additions and 6 deletions.
38 changes: 38 additions & 0 deletions cmd/tk/tool.go
@@ -10,6 +10,7 @@ import (
"strings"

"github.com/go-clix/cli"
"github.com/posener/complete"

"github.com/grafana/tanka/pkg/jsonnet"
"github.com/grafana/tanka/pkg/jsonnet/jpath"
@@ -23,6 +24,7 @@ func toolCmd() *cli.Command {
cmd.AddCommand(
jpathCmd(),
importsCmd(),
importersCmd(),
chartsCmd(),
)
return cmd
@@ -130,6 +132,42 @@ func importsCmd() *cli.Command {
return cmd
}

func importersCmd() *cli.Command {
cmd := &cli.Command{
Use: "importers <file> <file...>",
Short: "list all environments that either directly or transitively import the given files",
Args: cli.Args{
Validator: cli.ArgsMin(1),
Predictor: complete.PredictFiles("*"),
},
}

root := cmd.Flags().String("root", ".", "root directory to search for environments")
cmd.Run = func(cmd *cli.Command, args []string) error {
root, err := filepath.Abs(*root)
if err != nil {
return fmt.Errorf("resolving root: %w", err)
}

for _, f := range args {
if _, err := os.Stat(f); os.IsNotExist(err) {
return fmt.Errorf("file %q does not exist", f)
}
}

envs, err := jsonnet.FindImporterForFiles(root, args, nil)
if err != nil {
return fmt.Errorf("resolving imports: %s", err)
}

fmt.Println(strings.Join(envs, "\n"))

return nil
}

return cmd
}

func gitRoot() (string, error) {
s, err := git("rev-parse", "--show-toplevel")
return strings.TrimRight(s, "\n"), err
239 changes: 239 additions & 0 deletions pkg/jsonnet/find_importers.go
@@ -0,0 +1,239 @@
package jsonnet

import (
"os"
"path/filepath"
"sort"
"strings"

"github.com/grafana/tanka/pkg/jsonnet/jpath"
)

// FindImporterForFiles finds the entrypoints (main.jsonnet files) that import the given files.
// It looks through imports transitively, so if a file is imported through a chain, it will still be reported.
// If the given file is a main.jsonnet file, it will be returned as well.
func FindImporterForFiles(root string, files []string, chain map[string]struct{}) ([]string, error) {
if chain == nil {
chain = make(map[string]struct{})
}

var err error
root, err = filepath.Abs(root)
if err != nil {
return nil, err
}

importers := map[string]struct{}{}

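// On the initial call (empty chain), normalize the given files to absolute paths and add any symlink variants of them found under root.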
if len(chain) == 0 {
for i := range files {
files[i], err = filepath.Abs(files[i])
if err != nil {
return nil, err
}

symlink, err := evalSymlinks(files[i])
if err != nil {
return nil, err
}
if symlink != files[i] {
files = append(files, symlink)
}

symlinks, err := findSymlinks(root, files[i])
if err != nil {
return nil, err
}
files = append(files, symlinks...)
}

files = uniqueStringSlice(files)
}

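// Files that are themselves entrypoints count as their own importers; then collect every entrypoint that imports each file, directly or transitively.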
for _, file := range files {
if filepath.Base(file) == jpath.DefaultEntrypoint {
importers[file] = struct{}{}
}

newImporters, err := findImporters(root, file, chain)
if err != nil {
return nil, err
}
for _, importer := range newImporters {
importers[importer] = struct{}{}
}
}

var importersSlice []string
for importer := range importers {
importersSlice = append(importersSlice, importer)
}

sort.Strings(importersSlice)

return importersSlice, nil
}

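// cachedJsonnetFile holds what we keep for each Jsonnet file: its raw content, the paths it imports, its base directory (resolved lazily), and whether it is a main.jsonnet entrypoint.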
type cachedJsonnetFile struct {
Base string
Imports []string
Content string
IsMainFile bool
}

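// Package-level caches: Jsonnet files discovered per root directory, and resolved symlink targets.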
var jsonnetFilesMap = make(map[string]map[string]*cachedJsonnetFile)
var symlinkCache = make(map[string]string)

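// evalSymlinks is a cached wrapper around filepath.EvalSymlinks.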
func evalSymlinks(path string) (string, error) {
var err error
eval, ok := symlinkCache[path]
if !ok {
eval, err = filepath.EvalSymlinks(path)
if err != nil {
return "", err
}
symlinkCache[path] = eval
}
return eval, nil
}

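// findSymlinks walks root and, for every symlink whose resolved target is part of the given file path, returns that file path rewritten to go through the symlink.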
func findSymlinks(root, file string) ([]string, error) {
var symlinks []string

err := filepath.Walk(root, func(path string, info os.FileInfo, err error) error {
if err != nil {
return err
}

if info.Mode()&os.ModeSymlink == os.ModeSymlink {
eval, err := evalSymlinks(path)
if err != nil {
return err
}
if strings.Contains(file, eval) {
symlinks = append(symlinks, strings.Replace(file, eval, path, 1))
}
}

return nil
})

return symlinks, err
}

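// findImporters returns the entrypoints (main.jsonnet files) under root that import searchForFile, following non-entrypoint importers recursively. The chain map prevents revisiting files and guards against import cycles.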
func findImporters(root string, searchForFile string, chain map[string]struct{}) ([]string, error) {
if _, ok := chain[searchForFile]; ok {
return nil, nil
}
chain[searchForFile] = struct{}{}

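// Scan all Jsonnet files under root once and cache their content and imports; later lookups for the same root reuse the cache.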
if _, ok := jsonnetFilesMap[root]; !ok {
jsonnetFilesMap[root] = make(map[string]*cachedJsonnetFile)

files, err := FindFiles(root, nil)
if err != nil {
return nil, err
}
for _, file := range files {
content, err := os.ReadFile(file)
if err != nil {
return nil, err
}
matches := importsRegexp.FindAllStringSubmatch(string(content), -1)

cachedObj := &cachedJsonnetFile{
Content: string(content),
IsMainFile: strings.HasSuffix(file, jpath.DefaultEntrypoint),
}
for _, match := range matches {
cachedObj.Imports = append(cachedObj.Imports, match[2])
}
jsonnetFilesMap[root][file] = cachedObj
}
}
jsonnetFiles := jsonnetFilesMap[root]

var importers []string
var intermediateImporters []string

for jsonnetFilePath, jsonnetFileContent := range jsonnetFiles {
isImporter := false
for _, importPath := range jsonnetFileContent.Imports {
if filepath.Base(importPath) != filepath.Base(searchForFile) { // If the filename is not the same as the file we are looking for, skip
continue
}

// Match on relative imports with ..
// Jsonnet also matches all intermediary paths for some reason, so we look at them too
doubleDotCount := strings.Count(importPath, "..")
if doubleDotCount > 0 {
importPath = strings.ReplaceAll(importPath, "../", "")
for i := 0; i <= doubleDotCount; i++ {
dir := filepath.Dir(jsonnetFilePath)
for j := 0; j < i; j++ {
dir = filepath.Dir(dir)
}
testImportPath := filepath.Join(dir, importPath)
isImporter = pathMatches(searchForFile, testImportPath)
}
}

// Match on imports to lib/ or vendor/
if !isImporter {
importPath = strings.ReplaceAll(importPath, "./", "")
isImporter = pathMatches(searchForFile, filepath.Join(root, "vendor", importPath)) || pathMatches(searchForFile, filepath.Join(root, "lib", importPath))
}

// Match on imports to the base dir where the file is located (e.g. in the env dir)
if !isImporter {
if jsonnetFileContent.Base == "" {
base, err := jpath.FindBase(jsonnetFilePath, root)
if err != nil {
return nil, err
}
jsonnetFileContent.Base = base
}
isImporter = strings.HasPrefix(searchForFile, jsonnetFileContent.Base) && strings.HasSuffix(searchForFile, importPath)
}

if isImporter {
if jsonnetFileContent.IsMainFile {
importers = append(importers, jsonnetFilePath)
} else {
intermediateImporters = append(intermediateImporters, jsonnetFilePath)
}
break
}
}
}

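// Importers that are not entrypoints themselves are followed recursively until entrypoints are reached.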
if len(intermediateImporters) > 0 {
newImporters, err := FindImporterForFiles(root, intermediateImporters, chain)
if err != nil {
return nil, err
}
importers = append(importers, newImporters...)
}

return importers, nil
}

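// pathMatches reports whether the two paths refer to the same file, either literally or after resolving symlinks.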
func pathMatches(path1, path2 string) bool {
if path1 == path2 {
return true
}

var err error

evalPath1, err := evalSymlinks(path1)
if err != nil {
return false
}

evalPath2, err := evalSymlinks(path2)
if err != nil {
return false
}

return evalPath1 == evalPath2
}
