Skip to content

Commit

Permalink
Merge pull request #2728 from dgageot/fix-2694
Browse files Browse the repository at this point in the history
[Caching] Artifact’s config is an input to digest calculation
  • Loading branch information
balopat committed Aug 27, 2019
2 parents 85453f7 + 5467834 commit fb2107f
Show file tree
Hide file tree
Showing 2 changed files with 102 additions and 20 deletions.
30 changes: 24 additions & 6 deletions pkg/skaffold/build/cache/hash.go
Expand Up @@ -30,37 +30,56 @@ import (
"github.com/pkg/errors"
)

// For testing
var (
// For testing
hashFunction = cacheHasher
hashFunction = cacheHasher
artifactConfigFunction = artifactConfig
)

func getHashForArtifact(ctx context.Context, depLister DependencyLister, a *latest.Artifact) (string, error) {
var inputs []string

// Append the artifact's configuration
config, err := artifactConfigFunction(a)
if err != nil {
return "", errors.Wrapf(err, "getting artifact's configuration for %s", a.ImageName)
}
inputs = append(inputs, config)

// Append the digest of each input file
deps, err := depLister.DependenciesForArtifact(ctx, a)
if err != nil {
return "", errors.Wrapf(err, "getting dependencies for %s", a.ImageName)
}
sort.Strings(deps)

var hashes []string
for _, d := range deps {
h, err := hashFunction(d)
if err != nil {
return "", errors.Wrapf(err, "getting hash for %s", d)
}
hashes = append(hashes, h)
inputs = append(inputs, h)
}

// get a key for the hashes
hasher := sha256.New()
enc := json.NewEncoder(hasher)
if err := enc.Encode(hashes); err != nil {
if err := enc.Encode(inputs); err != nil {
return "", err
}

return hex.EncodeToString(hasher.Sum(nil)), nil
}

// artifactConfig serializes the artifact's type-specific configuration
// to a JSON string, suitable for inclusion in a cache-key computation.
func artifactConfig(a *latest.Artifact) (string, error) {
	serialized, err := json.Marshal(a.ArtifactType)
	if err != nil {
		return "", errors.Wrapf(err, "marshalling the artifact's configuration for %s", a.ImageName)
	}

	return string(serialized), nil
}

// cacheHasher takes hashes the contents and name of a file
func cacheHasher(p string) (string, error) {
h := md5.New()
Expand All @@ -70,7 +89,6 @@ func cacheHasher(p string) (string, error) {
}
h.Write([]byte(fi.Mode().String()))
h.Write([]byte(fi.Name()))
// TODO: empty folder and empty files should not have the same hash
if fi.Mode().IsRegular() {
f, err := os.Open(p)
if err != nil {
Expand Down
92 changes: 78 additions & 14 deletions pkg/skaffold/build/cache/hash_test.go
Expand Up @@ -36,36 +36,100 @@ var mockCacheHasher = func(s string) (string, error) {
return s, nil
}

// fakeArtifactConfig is a test double for artifactConfig: the returned
// string varies only with the Docker target, and is a constant for any
// non-Docker artifact type.
var fakeArtifactConfig = func(a *latest.Artifact) (string, error) {
	docker := a.ArtifactType.DockerArtifact
	if docker == nil {
		return "other", nil
	}
	return "docker/target=" + docker.Target, nil
}

func TestGetHashForArtifact(t *testing.T) {
tests := []struct {
description string
dependencies [][]string
dependencies []string
artifact *latest.Artifact
expected string
}{
{
description: "check dependencies in different orders",
dependencies: [][]string{
{"a", "b"},
{"b", "a"},
description: "hash for artifact",
dependencies: []string{"a", "b"},
artifact: &latest.Artifact{},
expected: "1caa15f7ce87536bddbac30a39768e8e3b212bf591f9b64926fa50c40b614c66",
},
{
description: "dependencies in different orders",
dependencies: []string{"b", "a"},
artifact: &latest.Artifact{},
expected: "1caa15f7ce87536bddbac30a39768e8e3b212bf591f9b64926fa50c40b614c66",
},
{
description: "no dependencies",
artifact: &latest.Artifact{},
expected: "53ebd85adc9b03923a7dacfe6002879af526ef6067d441419d6e62fb9bf608ab",
},
{
description: "docker target",
artifact: &latest.Artifact{
ArtifactType: latest.ArtifactType{
DockerArtifact: &latest.DockerArtifact{
Target: "target",
},
},
},
expected: "f947b5aad32734914aa2dea0ec95bceff257037e6c2a529007183c3f21547eae",
},
{
description: "different docker target",
artifact: &latest.Artifact{
ArtifactType: latest.ArtifactType{
DockerArtifact: &latest.DockerArtifact{
Target: "other",
},
},
},
expected: "eb394fd4559b1d9c383f4359667a508a615b82a74e1b160fce539f86ae0842e8",
expected: "09b366c764d0e39f942283cc081d5522b9dde52e725376661808054e3ed0177f",
},
}
for _, test := range tests {
testutil.Run(t, test.description, func(t *testutil.T) {
t.Override(&hashFunction, mockCacheHasher)
t.Override(&artifactConfigFunction, fakeArtifactConfig)

for _, d := range test.dependencies {
depLister := &stubDependencyLister{dependencies: d}
actual, err := getHashForArtifact(context.Background(), depLister, nil)
depLister := &stubDependencyLister{dependencies: test.dependencies}
actual, err := getHashForArtifact(context.Background(), depLister, test.artifact)

t.CheckNoError(err)
t.CheckDeepEqual(test.expected, actual)
}
t.CheckNoError(err)
t.CheckDeepEqual(test.expected, actual)
})
}
}

// TestArtifactConfig verifies that artifacts with different Docker
// targets serialize to different configuration strings.
func TestArtifactConfig(t *testing.T) {
	testutil.Run(t, "", func(t *testutil.T) {
		// dockerArtifact builds a minimal artifact with the given target.
		dockerArtifact := func(target string) *latest.Artifact {
			return &latest.Artifact{
				ArtifactType: latest.ArtifactType{
					DockerArtifact: &latest.DockerArtifact{
						Target: target,
					},
				},
			}
		}

		config1, err := artifactConfig(dockerArtifact("target"))
		t.CheckNoError(err)

		config2, err := artifactConfig(dockerArtifact("other"))
		t.CheckNoError(err)

		if config1 == config2 {
			t.Errorf("configs should be different: [%s] [%s]", config1, config2)
		}
	})
}

func TestCacheHasher(t *testing.T) {
tests := []struct {
description string
Expand Down Expand Up @@ -114,7 +178,7 @@ func TestCacheHasher(t *testing.T) {
path := originalFile
depLister := &stubDependencyLister{dependencies: []string{tmpDir.Path(originalFile)}}

oldHash, err := getHashForArtifact(context.Background(), depLister, nil)
oldHash, err := getHashForArtifact(context.Background(), depLister, &latest.Artifact{})
t.CheckNoError(err)

test.update(originalFile, tmpDir)
Expand All @@ -123,7 +187,7 @@ func TestCacheHasher(t *testing.T) {
}

depLister = &stubDependencyLister{dependencies: []string{tmpDir.Path(path)}}
newHash, err := getHashForArtifact(context.Background(), depLister, nil)
newHash, err := getHashForArtifact(context.Background(), depLister, &latest.Artifact{})

t.CheckNoError(err)
t.CheckDeepEqual(false, test.differentHash && oldHash == newHash)
Expand Down

0 comments on commit fb2107f

Please sign in to comment.