feat: allow ignoring remote cache-export error if failing #3430

Merged
5 changes: 5 additions & 0 deletions README.md
@@ -390,6 +390,7 @@ buildctl build ... \
* `compression=<uncompressed|gzip|estargz|zstd>`: choose compression type for layers newly created and cached, gzip is default value. estargz and zstd should be used with `oci-mediatypes=true`
* `compression-level=<value>`: choose compression level for gzip, estargz (0-9) and zstd (0-22)
* `force-compression=true`: forcibly apply `compression` option to all layers
* `ignore-error=<false|true>`: ignore errors if the cache export fails, instead of failing the build (default: `false`)
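
For illustration, a minimal sketch of the new option in use (the image and cache references below are placeholders, not part of this change):

```
buildctl build ... \
  --output type=image,name=docker.io/username/image,push=true \
  --export-cache type=registry,ref=docker.io/username/image:cache,ignore-error=true \
  --import-cache type=registry,ref=docker.io/username/image:cache
```

With `ignore-error=true`, a failed push of the cache no longer fails the build itself.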

`--import-cache` options:
* `type=registry`
@@ -415,6 +416,7 @@ The directory layout conforms to OCI Image Spec v1.0.
* `compression=<uncompressed|gzip|estargz|zstd>`: choose compression type for layers newly created and cached, gzip is default value. estargz and zstd should be used with `oci-mediatypes=true`.
* `compression-level=<value>`: compression level for gzip, estargz (0-9) and zstd (0-22)
* `force-compression=true`: forcibly apply `compression` option to all layers
* `ignore-error=<false|true>`: ignore errors if the cache export fails, instead of failing the build (default: `false`)
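
For illustration, a sketch with placeholder paths:

```
buildctl build ... \
  --export-cache type=local,dest=path/to/output-dir,ignore-error=true \
  --import-cache type=local,src=path/to/input-dir
```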

`--import-cache` options:
* `type=local`
@@ -449,6 +451,7 @@ in your workflow to expose the runtime.
* `min`: only export layers for the resulting image
* `max`: export all the layers of all intermediate steps
* `scope=<scope>`: which scope cache object belongs to (default `buildkit`)
* `ignore-error=<false|true>`: ignore errors if the cache export fails, instead of failing the build (default: `false`)
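
For illustration, a sketch assuming the cache URL and token are exposed as `$ACTIONS_CACHE_URL` and `$ACTIONS_RUNTIME_TOKEN` in the workflow; the scope value is a placeholder:

```
buildctl build ... \
  --export-cache type=gha,url=$ACTIONS_CACHE_URL,token=$ACTIONS_RUNTIME_TOKEN,scope=buildkit-main,ignore-error=true
```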

`--import-cache` options:
* `type=gha`
@@ -496,6 +499,7 @@ Other options are:
* `prefix=<prefix>`: set global prefix to store / read files on s3 (default: empty)
* `name=<manifest>`: specify name of the manifest to use (default `buildkit`)
* Multiple manifest names can be specified at the same time, separated by `;`. The standard use case is to use the git sha1 as name, and the branch name as duplicate, and load both with 2 `import-cache` commands.
* `ignore-error=<false|true>`: ignore errors if the cache export fails, instead of failing the build (default: `false`)
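
For illustration, a sketch with placeholder bucket, region, and manifest names:

```
buildctl build ... \
  --export-cache type=s3,region=us-east-1,bucket=my-bucket,name=my-manifest,ignore-error=true
```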

`--import-cache` options:
* `type=s3`
@@ -540,6 +544,7 @@ There are 2 options supported for Azure Blob Storage authentication:
* `prefix=<prefix>`: set global prefix to store / read files on the Azure Blob Storage container (`<container>`) (default: empty)
* `name=<manifest>`: specify name of the manifest to use (default: `buildkit`)
* Multiple manifest names can be specified at the same time, separated by `;`. The standard use case is to use the git sha1 as name, and the branch name as duplicate, and load both with 2 `import-cache` commands.
* `ignore-error=<false|true>`: ignore errors if the cache export fails, instead of failing the build (default: `false`)
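
For illustration, a sketch with a placeholder storage account URL and manifest name (assuming the `account_url` attribute described for this cache type):

```
buildctl build ... \
  --export-cache type=azblob,account_url=https://myaccount.blob.core.windows.net,name=my-manifest,ignore-error=true
```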

`--import-cache` options:
* `type=azblob`
108 changes: 108 additions & 0 deletions client/client_test.go
@@ -168,6 +168,7 @@ func TestIntegration(t *testing.T) {
testBuildInfoInline,
testBuildInfoNoExport,
testZstdLocalCacheExport,
testCacheExportIgnoreError,
testZstdRegistryCacheImportExport,
testZstdLocalCacheImportExport,
testUncompressedLocalCacheImportExport,
@@ -4351,6 +4352,113 @@ func testZstdLocalCacheExport(t *testing.T, sb integration.Sandbox) {
require.Equal(t, dt[:4], []byte{0x28, 0xb5, 0x2f, 0xfd})
}

func testCacheExportIgnoreError(t *testing.T, sb integration.Sandbox) {
integration.CheckFeatureCompat(t, sb, integration.FeatureCacheExport)
c, err := New(sb.Context(), sb.Address())
require.NoError(t, err)
defer c.Close()

busybox := llb.Image("busybox:latest")
cmd := `sh -e -c "echo -n ignore-error > data"`

st := llb.Scratch()
st = busybox.Run(llb.Shlex(cmd), llb.Dir("/wd")).AddMount("/wd", st)

def, err := st.Marshal(sb.Context())
require.NoError(t, err)

tests := map[string]struct {
Exports []ExportEntry
CacheExports []CacheOptionsEntry
expectedErrors []string
}{
"local-ignore-error": {
Exports: []ExportEntry{
{
Type: ExporterLocal,
OutputDir: t.TempDir(),
},
},
CacheExports: []CacheOptionsEntry{
{
Type: "local",
Attrs: map[string]string{
"dest": "éèç",
},
},
},
expectedErrors: []string{"failed to solve", "contains value with non-printable ASCII characters"},
},
"registry-ignore-error": {
Exports: []ExportEntry{
{
Type: ExporterImage,
Attrs: map[string]string{
"name": "test-registry-ignore-error",
"push": "false",
},
},
},
CacheExports: []CacheOptionsEntry{
{
Type: "registry",
Attrs: map[string]string{
"ref": "fake-url:5000/myrepo:buildcache",
},
},
},
expectedErrors: []string{"failed to solve", "dial tcp: lookup fake-url", "no such host"},
},
"s3-ignore-error": {
Exports: []ExportEntry{
{
Type: ExporterLocal,
OutputDir: t.TempDir(),
},
},
CacheExports: []CacheOptionsEntry{
{
Type: "s3",
Attrs: map[string]string{
"endpoint_url": "http://fake-url:9000",
"bucket": "my-bucket",
"region": "us-east-1",
"access_key_id": "minioadmin",
"secret_access_key": "minioadmin",
"use_path_style": "true",
},
},
},
expectedErrors: []string{"failed to solve", "dial tcp: lookup fake-url", "no such host"},
},
}
ignoreErrorValues := []bool{true, false}
for _, ignoreError := range ignoreErrorValues {
ignoreErrStr := strconv.FormatBool(ignoreError)
for n, test := range tests {
require.Equal(t, 1, len(test.Exports))
require.Equal(t, 1, len(test.CacheExports))
require.NotEmpty(t, test.CacheExports[0].Attrs)
test.CacheExports[0].Attrs["ignore-error"] = ignoreErrStr
testName := fmt.Sprintf("%s-%s", n, ignoreErrStr)
t.Run(testName, func(t *testing.T) {
_, err = c.Solve(sb.Context(), def, SolveOpt{
Exports: test.Exports,
CacheExports: test.CacheExports,
}, nil)
if ignoreError {
require.NoError(t, err)
} else {
require.Error(t, err)
for _, errStr := range test.expectedErrors {
require.Contains(t, err.Error(), errStr)
}
}
})
}
}
}

func testUncompressedLocalCacheImportExport(t *testing.T, sb integration.Sandbox) {
integration.CheckFeatureCompat(t, sb, integration.FeatureCacheExport)
dir := t.TempDir()
16 changes: 16 additions & 0 deletions control/control.go
@@ -3,6 +3,7 @@ package control
import (
"context"
"fmt"
"strconv"
"sync"
"sync/atomic"
"time"
@@ -368,6 +369,13 @@ func (c *Controller) Solve(ctx context.Context, req *controlapi.SolveRequest) (*
} else {
exp.CacheExportMode = exportMode
}
if ignoreErrorStr, ok := e.Attrs["ignore-error"]; ok {
if ignoreError, supported := parseCacheExportIgnoreError(ignoreErrorStr); !supported {
bklog.G(ctx).Debugf("skipping invalid cache export ignore-error: %s", e.Attrs["ignore-error"])
} else {
exp.IgnoreError = ignoreError
}
}
cacheExporters = append(cacheExporters, exp)
}

@@ -550,6 +558,14 @@ func parseCacheExportMode(mode string) (solver.CacheExportMode, bool) {
return solver.CacheExportModeMin, false
}

func parseCacheExportIgnoreError(ignoreErrorStr string) (bool, bool) {
ignoreError, err := strconv.ParseBool(ignoreErrorStr)
if err != nil {
return false, false
}
return ignoreError, true
}

func toPBGCPolicy(in []client.PruneInfo) []*apitypes.GCPolicy {
policy := make([]*apitypes.GCPolicy, 0, len(in))
for _, p := range in {
62 changes: 62 additions & 0 deletions control/control_test.go
@@ -84,3 +84,65 @@ func TestDuplicateCacheOptions(t *testing.T) {
})
}
}

func TestParseCacheExportIgnoreError(t *testing.T) {
tests := map[string]struct {
expectedIgnoreError bool
expectedSupported bool
}{
"": {
expectedIgnoreError: false,
expectedSupported: false,
},
".": {
expectedIgnoreError: false,
expectedSupported: false,
},
"fake": {
expectedIgnoreError: false,
expectedSupported: false,
},
"true": {
expectedIgnoreError: true,
expectedSupported: true,
},
"True": {
expectedIgnoreError: true,
expectedSupported: true,
},
"TRUE": {
expectedIgnoreError: true,
expectedSupported: true,
},
"truee": {
expectedIgnoreError: false,
expectedSupported: false,
},
"false": {
expectedIgnoreError: false,
expectedSupported: true,
},
"False": {
expectedIgnoreError: false,
expectedSupported: true,
},
"FALSE": {
expectedIgnoreError: false,
expectedSupported: true,
},
"ffalse": {
expectedIgnoreError: false,
expectedSupported: false,
},
}

for ignoreErrStr, test := range tests {
t.Run(ignoreErrStr, func(t *testing.T) {
ignoreErr, supported := parseCacheExportIgnoreError(ignoreErrStr)
t.Log("checking expectedIgnoreError")
require.Equal(t, test.expectedIgnoreError, ignoreErr)
t.Log("checking expectedSupported")
require.Equal(t, test.expectedSupported, supported)
})
}
}
13 changes: 7 additions & 6 deletions solver/llbsolver/solver.go
@@ -58,6 +58,7 @@ type ExporterRequest struct {
type RemoteCacheExporter struct {
remotecache.Exporter
solver.CacheExportMode
IgnoreError bool
}

// ResolveWorkerFunc returns default worker for the temporary default non-distributed use cases
@@ -570,7 +571,7 @@ func runCacheExporters(ctx context.Context, exporters []RemoteCacheExporter, j *
func(exp RemoteCacheExporter, i int) {
eg.Go(func() (err error) {
id := fmt.Sprint(j.SessionID, "-cache-", i)
-return inBuilderContext(ctx, j, exp.Exporter.Name(), id, func(ctx context.Context, _ session.Group) error {
+err = inBuilderContext(ctx, j, exp.Exporter.Name(), id, func(ctx context.Context, _ session.Group) error {
prepareDone := progress.OneOff(ctx, "preparing build cache for export")
if err := result.EachRef(cached, inp, func(res solver.CachedResult, ref cache.ImmutableRef) error {
ctx = withDescHandlerCacheOpts(ctx, ref)
@@ -589,13 +590,13 @@
}); err != nil {
return prepareDone(err)
}
-prepareDone(nil)
resps[i], err = exp.Finalize(ctx)
-if err != nil {
-return err
-}
-return nil
+return prepareDone(err)
})
+if exp.IgnoreError {
+err = nil
+}
+return err
})
}(exp, i)
}