Add new tk tool importers command (#771)
* Add new `tk tool importers` command

This command is most useful when combined with the `--merge-strategy=replace-envs` flag added in #760.

Given a repository with a large number of Tanka environments, exporting the whole set on every commit can be very time-consuming.
What a user usually wants is to export only the environments that have changed.
To do that, we need to know which files affect which environments, and that is what this new command answers.
A user can now pass the list of files modified in a git commit and run `tk export` on just the environments affected by those files (see the sketch below).

* Add comments and simplify functions
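
As an illustration only (not part of this commit), a CI job could wire this together with the Go helper added below. This is a minimal sketch, assuming it runs from the repository root; the output directory, the `git diff` range, and any extra `tk export` flags are assumptions to adapt to the repository's setup:

	// Sketch of a CI helper: export only the environments affected by the
	// files changed in the last commit.
	package main

	import (
		"log"
		"os"
		"os/exec"
		"path/filepath"
		"strings"

		"github.com/grafana/tanka/pkg/jsonnet"
	)

	func main() {
		// Files touched by the last commit (assumes they still exist on disk).
		out, err := exec.Command("git", "diff", "--name-only", "HEAD~1").Output()
		if err != nil {
			log.Fatal(err)
		}
		changed := strings.Fields(string(out))
		if len(changed) == 0 {
			return
		}

		// Entrypoints (main.jsonnet files) importing any changed file, directly
		// or transitively. This is what `tk tool importers` prints.
		envs, err := jsonnet.FindImporterForFiles(".", changed)
		if err != nil {
			log.Fatal(err)
		}
		if len(envs) == 0 {
			log.Println("no environments affected")
			return
		}

		// Re-export only those environments. The output dir ("rendered") and any
		// additional flags your setup needs are assumptions;
		// --merge-strategy=replace-envs is the flag from #760 mentioned above.
		args := []string{"export", "rendered", "--merge-strategy=replace-envs"}
		for _, e := range envs {
			args = append(args, filepath.Dir(e)) // environment dir of each main.jsonnet
		}
		cmd := exec.Command("tk", args...)
		cmd.Stdout, cmd.Stderr = os.Stdout, os.Stderr
		if err := cmd.Run(); err != nil {
			log.Fatal(err)
		}
	}

The same flow works on the command line by passing the changed files to `tk tool importers` and handing its output to `tk export`.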
julienduchesne authored Oct 4, 2022
1 parent c5a739e commit 51b69c5
Showing 21 changed files with 461 additions and 6 deletions.
38 changes: 38 additions & 0 deletions cmd/tk/tool.go
@@ -10,6 +10,7 @@ import (
"strings"

"github.com/go-clix/cli"
"github.com/posener/complete"

"github.com/grafana/tanka/pkg/jsonnet"
"github.com/grafana/tanka/pkg/jsonnet/jpath"
@@ -23,6 +24,7 @@ func toolCmd() *cli.Command {
	cmd.AddCommand(
		jpathCmd(),
		importsCmd(),
		importersCmd(),
		chartsCmd(),
	)
	return cmd
@@ -130,6 +132,42 @@ func importsCmd() *cli.Command {
	return cmd
}

func importersCmd() *cli.Command {
	cmd := &cli.Command{
		Use:   "importers <file> <file...>",
		Short: "list all environments that either directly or transitively import the given files",
		Args: cli.Args{
			Validator: cli.ArgsMin(1),
			Predictor: complete.PredictFiles("*"),
		},
	}

	root := cmd.Flags().String("root", ".", "root directory to search for environments")
	cmd.Run = func(cmd *cli.Command, args []string) error {
		root, err := filepath.Abs(*root)
		if err != nil {
			return fmt.Errorf("resolving root: %w", err)
		}

		for _, f := range args {
			if _, err := os.Stat(f); os.IsNotExist(err) {
				return fmt.Errorf("file %q does not exist", f)
			}
		}

		envs, err := jsonnet.FindImporterForFiles(root, args)
		if err != nil {
			return fmt.Errorf("resolving imports: %s", err)
		}

		fmt.Println(strings.Join(envs, "\n"))

		return nil
	}

	return cmd
}

func gitRoot() (string, error) {
	s, err := git("rev-parse", "--show-toplevel")
	return strings.TrimRight(s, "\n"), err
270 changes: 270 additions & 0 deletions pkg/jsonnet/find_importers.go
@@ -0,0 +1,270 @@
package jsonnet

import (
	"os"
	"path/filepath"
	"sort"
	"strings"

	"github.com/grafana/tanka/pkg/jsonnet/jpath"
)

var (
	jsonnetFilesMap = make(map[string]map[string]*cachedJsonnetFile)
	symlinkCache    = make(map[string]string)
)

type cachedJsonnetFile struct {
	Base       string
	Imports    []string
	Content    string
	IsMainFile bool
}

// FindImporterForFiles finds the entrypoints (main.jsonnet files) that import the given files.
// It looks through imports transitively, so if a file is imported through a chain, it will still be reported.
// If the given file is a main.jsonnet file, it will be returned as well.
func FindImporterForFiles(root string, files []string) ([]string, error) {
	var err error
	root, err = filepath.Abs(root)
	if err != nil {
		return nil, err
	}

	importers := map[string]struct{}{}

	if files, err = expandSymlinksInFiles(root, files); err != nil {
		return nil, err
	}

	// Loop through all given files and add their importers to the list
	for _, file := range files {
		if filepath.Base(file) == jpath.DefaultEntrypoint {
			importers[file] = struct{}{}
		}

		newImporters, err := findImporters(root, file, map[string]struct{}{})
		if err != nil {
			return nil, err
		}
		for _, importer := range newImporters {
			importers[importer] = struct{}{}
		}
	}

	return mapToArray(importers), nil
}

// expandSymlinksInFiles takes an array of files and adds to it:
// - all symlinks that point to the files
// - all files that are pointed to by the symlinks
func expandSymlinksInFiles(root string, files []string) ([]string, error) {
	filesMap := map[string]struct{}{}

	for _, file := range files {
		file, err := filepath.Abs(file)
		if err != nil {
			return nil, err
		}
		filesMap[file] = struct{}{}

		symlink, err := evalSymlinks(file)
		if err != nil {
			return nil, err
		}
		if symlink != file {
			filesMap[symlink] = struct{}{}
		}

		symlinks, err := findSymlinks(root, file)
		if err != nil {
			return nil, err
		}
		for _, symlink := range symlinks {
			filesMap[symlink] = struct{}{}
		}
	}

	return mapToArray(filesMap), nil
}

// evalSymlinks returns the path after following all symlinks.
// It caches the results to avoid unnecessary work.
func evalSymlinks(path string) (string, error) {
	var err error
	eval, ok := symlinkCache[path]
	if !ok {
		eval, err = filepath.EvalSymlinks(path)
		if err != nil {
			return "", err
		}
		symlinkCache[path] = eval
	}
	return eval, nil
}

// findSymlinks finds all symlinks that point to the given file.
// It's restricted to the given root directory.
// It's used in the case where a user wants to find which entrypoints import a given file.
// In that case, we also want to find the entrypoints that import a symlink to the file.
func findSymlinks(root, file string) ([]string, error) {
	var symlinks []string

	err := filepath.Walk(root, func(path string, info os.FileInfo, err error) error {
		if err != nil {
			return err
		}

		if info.Mode()&os.ModeSymlink == os.ModeSymlink {
			eval, err := evalSymlinks(path)
			if err != nil {
				return err
			}
			if strings.Contains(file, eval) {
				symlinks = append(symlinks, strings.Replace(file, eval, path, 1))
			}
		}

		return nil
	})

	return symlinks, err
}

func findImporters(root string, searchForFile string, chain map[string]struct{}) ([]string, error) {
	// If we've already looked through this file in the current execution, don't do it again
	// Jsonnet supports cyclic imports (as long as the _attributes_ being used are not cyclic)
	if _, ok := chain[searchForFile]; ok {
		return nil, nil
	}
	chain[searchForFile] = struct{}{}

	// If we've never fetched the map of all jsonnet files, do it now
	// This is cached for performance
	if _, ok := jsonnetFilesMap[root]; !ok {
		jsonnetFilesMap[root] = make(map[string]*cachedJsonnetFile)

		files, err := FindFiles(root, nil)
		if err != nil {
			return nil, err
		}
		for _, file := range files {
			content, err := os.ReadFile(file)
			if err != nil {
				return nil, err
			}
			matches := importsRegexp.FindAllStringSubmatch(string(content), -1)

			cachedObj := &cachedJsonnetFile{
				Content:    string(content),
				IsMainFile: strings.HasSuffix(file, jpath.DefaultEntrypoint),
			}
			for _, match := range matches {
				cachedObj.Imports = append(cachedObj.Imports, match[2])
			}
			jsonnetFilesMap[root][file] = cachedObj
		}
	}
	jsonnetFiles := jsonnetFilesMap[root]

	var importers []string
	var intermediateImporters []string

	for jsonnetFilePath, jsonnetFileContent := range jsonnetFiles {
		isImporter := false
		// For all imports in all jsonnet files, check if they import the file we're looking for
		for _, importPath := range jsonnetFileContent.Imports {
			// If the filename is not the same as the file we are looking for, skip it
			if filepath.Base(importPath) != filepath.Base(searchForFile) {
				continue
			}

			// Match on relative imports with ..
			// Jsonnet also matches all intermediary paths for some reason, so we look at them too
			doubleDotCount := strings.Count(importPath, "..")
			if doubleDotCount > 0 {
				importPath = strings.ReplaceAll(importPath, "../", "")
				for i := 0; i <= doubleDotCount; i++ {
					dir := filepath.Dir(jsonnetFilePath)
					for j := 0; j < i; j++ {
						dir = filepath.Dir(dir)
					}
					testImportPath := filepath.Join(dir, importPath)
					isImporter = pathMatches(searchForFile, testImportPath)
				}
			}

			// Match on imports to lib/ or vendor/
			if !isImporter {
				importPath = strings.ReplaceAll(importPath, "./", "")
				isImporter = pathMatches(searchForFile, filepath.Join(root, "vendor", importPath)) || pathMatches(searchForFile, filepath.Join(root, "lib", importPath))
			}

			// Match on imports to the base dir where the file is located (e.g. in the env dir)
			if !isImporter {
				if jsonnetFileContent.Base == "" {
					base, err := jpath.FindBase(jsonnetFilePath, root)
					if err != nil {
						return nil, err
					}
					jsonnetFileContent.Base = base
				}
				isImporter = strings.HasPrefix(searchForFile, jsonnetFileContent.Base) && strings.HasSuffix(searchForFile, importPath)
			}

			// If the file we're looking in imports one of the files we're looking for, add it to the list
			// It can either be an importer that we want to return (from a main file) or an intermediate importer
			if isImporter {
				if jsonnetFileContent.IsMainFile {
					importers = append(importers, jsonnetFilePath)
				} else {
					intermediateImporters = append(intermediateImporters, jsonnetFilePath)
				}
				break
			}
		}
	}

	// Process intermediate importers recursively
	// This will go on until we hit a main file, which will be returned
	if len(intermediateImporters) > 0 {
		for _, intermediateImporter := range intermediateImporters {
			newImporters, err := findImporters(root, intermediateImporter, chain)
			if err != nil {
				return nil, err
			}
			importers = append(importers, newImporters...)
		}
	}

	return importers, nil
}

func pathMatches(path1, path2 string) bool {
	if path1 == path2 {
		return true
	}

	var err error

	evalPath1, err := evalSymlinks(path1)
	if err != nil {
		return false
	}

	evalPath2, err := evalSymlinks(path2)
	if err != nil {
		return false
	}

	return evalPath1 == evalPath2
}

func mapToArray(m map[string]struct{}) []string {
	var arr []string
	for k := range m {
		arr = append(arr, k)
	}
	sort.Strings(arr)
	return arr
}
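
To make the behaviour of FindImporterForFiles concrete, here is a small, self-contained sketch (not part of the commit) against an assumed minimal project layout: an environment whose main.jsonnet imports a helper, which in turn imports a file from lib/. Changing the lib/ file reports the environment's entrypoint, because the import chain is followed transitively; the exact layout and file names are assumptions chosen for illustration.

	package main

	import (
		"fmt"
		"log"
		"os"
		"path/filepath"

		"github.com/grafana/tanka/pkg/jsonnet"
	)

	func main() {
		// Assumed layout:
		//   jsonnetfile.json
		//   lib/colors.libsonnet                <- the "changed" file
		//   environments/demo/util.libsonnet    <- imports colors.libsonnet
		//   environments/demo/main.jsonnet      <- imports util.libsonnet
		root, err := os.MkdirTemp("", "tk-importers-demo")
		if err != nil {
			log.Fatal(err)
		}
		defer os.RemoveAll(root)

		files := map[string]string{
			"jsonnetfile.json":                 "{}",
			"lib/colors.libsonnet":             `{ red: "#f00" }`,
			"environments/demo/util.libsonnet": `import "colors.libsonnet"`,
			"environments/demo/main.jsonnet":   `import "util.libsonnet"`,
		}
		for name, content := range files {
			path := filepath.Join(root, name)
			if err := os.MkdirAll(filepath.Dir(path), 0o755); err != nil {
				log.Fatal(err)
			}
			if err := os.WriteFile(path, []byte(content), 0o644); err != nil {
				log.Fatal(err)
			}
		}

		// main.jsonnet does not import colors.libsonnet itself, but it imports
		// util.libsonnet, which does, so it is reported as an importer.
		envs, err := jsonnet.FindImporterForFiles(root, []string{filepath.Join(root, "lib", "colors.libsonnet")})
		if err != nil {
			log.Fatal(err)
		}
		fmt.Println(envs) // expected: [<root>/environments/demo/main.jsonnet]
	}

Note that the per-root cache (jsonnetFilesMap) means repeated queries against the same root reuse the parsed import list within one process, which is what keeps this usable on repositories with many environments.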