Skip to content

Commit

Permalink
Merge pull request #3024 from a-palchikov/dima/2818/multiple-cache-exports
Browse files Browse the repository at this point in the history

Multiple cache exports
  • Loading branch information
jedevc authored Nov 3, 2022
2 parents 6c0d3c7 + cf45d28 commit ac4f39a
Show file tree
Hide file tree
Showing 11 changed files with 447 additions and 104 deletions.
4 changes: 4 additions & 0 deletions cache/remotecache/azblob/exporter.go
Original file line number Diff line number Diff line change
Expand Up @@ -53,6 +53,10 @@ type exporter struct {
config *Config
}

// Name identifies this exporter in progress output.
func (ce *exporter) Name() string {
	const name = "exporting cache to azure blob store"
	return name
}

func (ce *exporter) Finalize(ctx context.Context) (map[string]string, error) {
config, descs, err := ce.chains.Marshal(ctx)
if err != nil {
Expand Down
6 changes: 6 additions & 0 deletions cache/remotecache/export.go
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,8 @@ type ResolveCacheExporterFunc func(ctx context.Context, g session.Group, attrs m

type Exporter interface {
solver.CacheExporterTarget
// Name uniquely identifies the exporter
Name() string
// Finalize finalizes and returns metadata that is returned to the client
// e.g. ExporterResponseManifestDesc
Finalize(ctx context.Context) (map[string]string, error)
Expand Down Expand Up @@ -55,6 +57,10 @@ func NewExporter(ingester content.Ingester, ref string, oci bool, compressionCon
return &contentCacheExporter{CacheExporterTarget: cc, chains: cc, ingester: ingester, oci: oci, ref: ref, comp: compressionConfig}
}

// Name identifies this exporter in progress output.
func (ce *contentCacheExporter) Name() string {
	const name = "exporting content cache"
	return name
}

func (ce *contentCacheExporter) Config() Config {
return Config{
Compression: ce.comp,
Expand Down
4 changes: 4 additions & 0 deletions cache/remotecache/gha/gha.go
Original file line number Diff line number Diff line change
Expand Up @@ -90,6 +90,10 @@ func NewExporter(c *Config) (remotecache.Exporter, error) {
return &exporter{CacheExporterTarget: cc, chains: cc, cache: cache, config: c}, nil
}

// Name identifies this exporter in progress output.
func (*exporter) Name() string {
	const name = "exporting to GitHub cache"
	return name
}

func (ce *exporter) Config() remotecache.Config {
return remotecache.Config{
Compression: compression.New(compression.Default),
Expand Down
4 changes: 4 additions & 0 deletions cache/remotecache/inline/inline.go
Original file line number Diff line number Diff line change
Expand Up @@ -30,6 +30,10 @@ type exporter struct {
chains *v1.CacheChains
}

// Name identifies this exporter in progress output.
func (*exporter) Name() string {
	const name = "exporting inline cache"
	return name
}

func (ce *exporter) Config() remotecache.Config {
return remotecache.Config{
Compression: compression.New(compression.Default),
Expand Down
4 changes: 4 additions & 0 deletions cache/remotecache/s3/s3.go
Original file line number Diff line number Diff line change
Expand Up @@ -161,6 +161,10 @@ type exporter struct {
config Config
}

// Name identifies this exporter in progress output.
func (*exporter) Name() string {
	const name = "exporting cache to s3"
	return name
}

func (e *exporter) Config() remotecache.Config {
return remotecache.Config{
Compression: compression.New(compression.Default),
Expand Down
130 changes: 130 additions & 0 deletions client/client_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -186,6 +186,7 @@ func TestIntegration(t *testing.T) {
testAttestationBundle,
testSBOMScan,
testSBOMScanSingleRef,
testMultipleCacheExports,
)
tests = append(tests, diffOpTestCases()...)
integration.Run(t, tests, mirrors)
Expand Down Expand Up @@ -7812,6 +7813,135 @@ EOF
require.Equal(t, map[string]interface{}{"success": false}, attest.Predicate)
}

// testMultipleCacheExports verifies that a single solve can export its build
// cache to several destinations at once (two local directories, a registry
// ref, and inline), and that the registry-exported cache can later be
// imported to reproduce the build with full cache hits.
func testMultipleCacheExports(t *testing.T, sb integration.Sandbox) {
	integration.SkipIfDockerd(t, sb, "multiple cache exports")
	c, err := New(sb.Context(), sb.Address())
	require.NoError(t, err)
	defer c.Close()

	// A registry is required for the "registry" cache export and the image
	// push targets; skip (rather than fail) when the sandbox can't provide one.
	registry, err := sb.NewRegistry()
	if errors.Is(err, integration.ErrRequirements) {
		t.Skip(err.Error())
	}
	require.NoError(t, err)

	busybox := llb.Image("busybox:latest")
	st := llb.Scratch()
	run := func(cmd string) {
		st = busybox.Run(llb.Shlex(cmd), llb.Dir("/wd")).AddMount("/wd", st)
	}
	// "const" has deterministic content; "unique" is random per execution, so
	// a later build producing the same "unique" proves a cache hit rather
	// than a re-run of the step.
	run(`sh -c "echo -n foobar > const"`)
	run(`sh -c "cat /dev/urandom | head -c 100 | sha256sum > unique"`)

	def, err := st.Marshal(sb.Context())
	require.NoError(t, err)

	target := path.Join(registry, "image:test")
	target2 := path.Join(registry, "image-copy:test")
	cacheRef := path.Join(registry, "cache:test")
	cacheOutDir, cacheOutDir2 := t.TempDir(), t.TempDir()

	// First solve: push the image and export the cache to four destinations
	// in one request — the behavior under test.
	res, err := c.Solve(sb.Context(), def, SolveOpt{
		Exports: []ExportEntry{
			{
				Type: ExporterImage,
				Attrs: map[string]string{
					"name": target,
					"push": "true",
				},
			},
		},
		CacheExports: []CacheOptionsEntry{
			{
				Type: "local",
				Attrs: map[string]string{
					"dest": cacheOutDir,
				},
			},
			{
				Type: "local",
				Attrs: map[string]string{
					"dest": cacheOutDir2,
				},
			},
			{
				Type: "registry",
				Attrs: map[string]string{
					"ref": cacheRef,
				},
			},
			{
				Type: "inline",
			},
		},
	}, nil)
	require.NoError(t, err)

	// Both local cache exports must have been written.
	ensureFile(t, filepath.Join(cacheOutDir, "index.json"))
	ensureFile(t, filepath.Join(cacheOutDir2, "index.json"))

	dgst := res.ExporterResponse[exptypes.ExporterImageDigestKey]

	// Capture the random file's content so later cache hits can be detected.
	uniqueFile, err := readFileInImage(sb.Context(), t, c, target+"@"+dgst, "/unique")
	require.NoError(t, err)

	// Second solve of the same definition with only inline export: everything
	// should come from cache, producing a byte-identical image.
	res, err = c.Solve(sb.Context(), def, SolveOpt{
		Exports: []ExportEntry{
			{
				Type: ExporterImage,
				Attrs: map[string]string{
					"name": target2,
					"push": "true",
				},
			},
		},
		CacheExports: []CacheOptionsEntry{
			{
				Type: "inline",
			},
		},
	}, nil)
	require.NoError(t, err)

	dgst2 := res.ExporterResponse[exptypes.ExporterImageDigestKey]
	require.Equal(t, dgst, dgst2)

	// Third solve: wipe local state, then rebuild importing only the
	// registry-exported cache; matching "unique" content proves the import
	// supplied cache hits for every step.
	destDir := t.TempDir()
	ensurePruneAll(t, c, sb)
	_, err = c.Solve(sb.Context(), def, SolveOpt{
		Exports: []ExportEntry{
			{
				Type:      ExporterLocal,
				OutputDir: destDir,
			},
		},
		CacheImports: []CacheOptionsEntry{
			{
				Type: "registry",
				Attrs: map[string]string{
					"ref": cacheRef,
				},
			},
		},
	}, nil)
	require.NoError(t, err)

	ensureFileContents(t, filepath.Join(destDir, "const"), "foobar")
	ensureFileContents(t, filepath.Join(destDir, "unique"), string(uniqueFile))
}

// ensureFile asserts that a regular file exists at path.
func ensureFile(t *testing.T, path string) {
	t.Helper() // report failures at the caller's line, not here
	st, err := os.Stat(path)
	require.NoError(t, err, "expected file at %s", path)
	require.True(t, st.Mode().IsRegular(), "expected regular file at %s", path)
}

// ensureFileContents asserts that the file at path contains exactly
// expectedContents.
func ensureFileContents(t *testing.T, path, expectedContents string) {
	t.Helper() // report failures at the caller's line, not here
	contents, err := os.ReadFile(path)
	require.NoError(t, err, "expected readable file at %s", path)
	require.Equal(t, expectedContents, string(contents))
}

func makeSSHAgentSock(t *testing.T, agent agent.Agent) (p string, err error) {
tmpDir, err := integration.Tmpdir(t)
if err != nil {
Expand Down
31 changes: 14 additions & 17 deletions control/control.go
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@ import (
"github.com/moby/buildkit/solver"
"github.com/moby/buildkit/solver/llbsolver"
"github.com/moby/buildkit/solver/llbsolver/proc"
solverutil "github.com/moby/buildkit/solver/llbsolver/util"
"github.com/moby/buildkit/solver/pb"
"github.com/moby/buildkit/util/bklog"
"github.com/moby/buildkit/util/imageutil"
Expand Down Expand Up @@ -74,7 +75,6 @@ func NewController(opt Opt) (*Controller, error) {
SessionManager: opt.SessionManager,
Entitlements: opt.Entitlements,
})

if err != nil {
return nil, errors.Wrap(err, "failed to create solver")
}
Expand Down Expand Up @@ -293,31 +293,29 @@ func (c *Controller) Solve(ctx context.Context, req *controlapi.SolveRequest) (*
}
}

var (
cacheExporter remotecache.Exporter
cacheExportMode solver.CacheExportMode
cacheImports []frontend.CacheOptionsEntry
)
if len(req.Cache.Exports) > 1 {
// TODO(AkihiroSuda): this should be fairly easy
return nil, errors.New("specifying multiple cache exports is not supported currently")
}
var cacheImports []frontend.CacheOptionsEntry

if len(req.Cache.Exports) == 1 {
e := req.Cache.Exports[0]
var cacheExporters []llbsolver.RemoteCacheExporter
exportCacheOptEntries, err := solverutil.DedupCacheOptions(req.Cache.Exports)
if err != nil {
return nil, err
}
for _, e := range exportCacheOptEntries {
cacheExporterFunc, ok := c.opt.ResolveCacheExporterFuncs[e.Type]
if !ok {
return nil, errors.Errorf("unknown cache exporter: %q", e.Type)
}
cacheExporter, err = cacheExporterFunc(ctx, session.NewGroup(req.Session), e.Attrs)
var exp llbsolver.RemoteCacheExporter
exp.Exporter, err = cacheExporterFunc(ctx, session.NewGroup(req.Session), e.Attrs)
if err != nil {
return nil, errors.Wrapf(err, "failed to configure %v cache exporter", e.Type)
}
if exportMode, supported := parseCacheExportMode(e.Attrs["mode"]); !supported {
bklog.G(ctx).Debugf("skipping invalid cache export mode: %s", e.Attrs["mode"])
} else {
cacheExportMode = exportMode
exp.CacheExportMode = exportMode
}
cacheExporters = append(cacheExporters, exp)
}
for _, im := range req.Cache.Imports {
cacheImports = append(cacheImports, frontend.CacheOptionsEntry{
Expand Down Expand Up @@ -352,9 +350,8 @@ func (c *Controller) Solve(ctx context.Context, req *controlapi.SolveRequest) (*
FrontendInputs: req.FrontendInputs,
CacheImports: cacheImports,
}, llbsolver.ExporterRequest{
Exporter: expi,
CacheExporter: cacheExporter,
CacheExportMode: cacheExportMode,
Exporter: expi,
CacheExporters: cacheExporters,
}, req.Entitlements, procs)
if err != nil {
return nil, err
Expand Down
Loading

0 comments on commit ac4f39a

Please sign in to comment.