Merge pull request #2556 from tonistiigi/bake-call

bake: add call methods support and printing
Authored by Tõnis Tiigi on 2024-07-03 10:40:32 -07:00, committed by GitHub
commit c51004e2e4
10 changed files with 552 additions and 90 deletions


@@ -177,7 +177,7 @@ func readWithProgress(r io.Reader, setStatus func(st *client.VertexStatus)) (dt
 }
 
 func ListTargets(files []File) ([]string, error) {
-	c, err := ParseFiles(files, nil)
+	c, _, err := ParseFiles(files, nil)
 	if err != nil {
 		return nil, err
 	}
@@ -192,7 +192,7 @@ func ListTargets(files []File) ([]string, error) {
 }
 
 func ReadTargets(ctx context.Context, files []File, targets, overrides []string, defaults map[string]string) (map[string]*Target, map[string]*Group, error) {
-	c, err := ParseFiles(files, defaults)
+	c, _, err := ParseFiles(files, defaults)
 	if err != nil {
 		return nil, nil, err
 	}
@@ -298,7 +298,7 @@ func sliceToMap(env []string) (res map[string]string) {
 	return
 }
 
-func ParseFiles(files []File, defaults map[string]string) (_ *Config, err error) {
+func ParseFiles(files []File, defaults map[string]string) (_ *Config, _ *hclparser.ParseMeta, err error) {
 	defer func() {
 		err = formatHCLError(err, files)
 	}()
@@ -310,7 +310,7 @@ func ParseFiles(files []File, defaults map[string]string) (_ *Config, err error)
 		isCompose, composeErr := validateComposeFile(f.Data, f.Name)
 		if isCompose {
 			if composeErr != nil {
-				return nil, composeErr
+				return nil, nil, composeErr
 			}
 			composeFiles = append(composeFiles, f)
 		}
@@ -318,13 +318,13 @@ func ParseFiles(files []File, defaults map[string]string) (_ *Config, err error)
 			hf, isHCL, err := ParseHCLFile(f.Data, f.Name)
 			if isHCL {
 				if err != nil {
-					return nil, err
+					return nil, nil, err
 				}
 				hclFiles = append(hclFiles, hf)
 			} else if composeErr != nil {
-				return nil, errors.Wrapf(err, "failed to parse %s: parsing yaml: %v, parsing hcl", f.Name, composeErr)
+				return nil, nil, errors.Wrapf(err, "failed to parse %s: parsing yaml: %v, parsing hcl", f.Name, composeErr)
 			} else {
-				return nil, err
+				return nil, nil, err
 			}
 		}
 	}
@@ -332,23 +332,24 @@ func ParseFiles(files []File, defaults map[string]string) (_ *Config, err error)
 	if len(composeFiles) > 0 {
 		cfg, cmperr := ParseComposeFiles(composeFiles)
 		if cmperr != nil {
-			return nil, errors.Wrap(cmperr, "failed to parse compose file")
+			return nil, nil, errors.Wrap(cmperr, "failed to parse compose file")
 		}
 		c = mergeConfig(c, *cfg)
 		c = dedupeConfig(c)
 	}
 
+	var pm hclparser.ParseMeta
 	if len(hclFiles) > 0 {
-		renamed, err := hclparser.Parse(hclparser.MergeFiles(hclFiles), hclparser.Opt{
+		res, err := hclparser.Parse(hclparser.MergeFiles(hclFiles), hclparser.Opt{
 			LookupVar:     os.LookupEnv,
 			Vars:          defaults,
 			ValidateLabel: validateTargetName,
 		}, &c)
 		if err.HasErrors() {
-			return nil, err
+			return nil, nil, err
 		}
-		for _, renamed := range renamed {
+		for _, renamed := range res.Renamed {
 			for oldName, newNames := range renamed {
 				newNames = dedupSlice(newNames)
 				if len(newNames) == 1 && oldName == newNames[0] {
@@ -361,9 +362,10 @@ func ParseFiles(files []File, defaults map[string]string) (_ *Config, err error)
 			}
 		}
 		c = dedupeConfig(c)
+		pm = *res
 	}
-	return &c, nil
+	return &c, &pm, nil
 }
 
 func dedupeConfig(c Config) Config {
@@ -388,7 +390,8 @@ func dedupeConfig(c Config) Config {
 }
 
 func ParseFile(dt []byte, fn string) (*Config, error) {
-	return ParseFiles([]File{{Data: dt, Name: fn}}, nil)
+	c, _, err := ParseFiles([]File{{Data: dt, Name: fn}}, nil)
+	return c, err
 }
 
 type Config struct {
@@ -669,13 +672,15 @@ func (c Config) target(name string, visited map[string]*Target, overrides map[st
 }
 
 type Group struct {
 	Name        string   `json:"-" hcl:"name,label" cty:"name"`
-	Targets []string `json:"targets" hcl:"targets" cty:"targets"`
+	Description string   `json:"description,omitempty" hcl:"description,optional" cty:"description"`
+	Targets     []string `json:"targets" hcl:"targets" cty:"targets"`
 	// Target // TODO?
 }
 
 type Target struct {
 	Name        string `json:"-" hcl:"name,label" cty:"name"`
+	Description string `json:"description,omitempty" hcl:"description,optional" cty:"description"`
 	// Inherits is the only field that cannot be overridden with --set
 	Inherits []string `json:"inherits,omitempty" hcl:"inherits,optional" cty:"inherits"`
@@ -702,7 +707,8 @@ type Target struct {
 	NoCacheFilter []string `json:"no-cache-filter,omitempty" hcl:"no-cache-filter,optional" cty:"no-cache-filter"`
 	ShmSize       *string  `json:"shm-size,omitempty" hcl:"shm-size,optional"`
 	Ulimits       []string `json:"ulimits,omitempty" hcl:"ulimits,optional"`
-	// IMPORTANT: if you add more fields here, do not forget to update newOverrides and docs/bake-reference.md.
+	Call          *string  `json:"call,omitempty" hcl:"call,optional" cty:"call"`
+	// IMPORTANT: if you add more fields here, do not forget to update newOverrides/AddOverrides and docs/bake-reference.md.
 
 	// linked is a private field to mark a target used as a linked one
 	linked bool
@@ -776,6 +782,9 @@ func (t *Target) Merge(t2 *Target) {
 	if t2.Target != nil {
 		t.Target = t2.Target
 	}
+	if t2.Call != nil {
+		t.Call = t2.Call
+	}
 	if t2.Annotations != nil { // merge
 		t.Annotations = append(t.Annotations, t2.Annotations...)
 	}
@@ -819,6 +828,9 @@ func (t *Target) Merge(t2 *Target) {
 	if t2.Ulimits != nil { // merge
 		t.Ulimits = append(t.Ulimits, t2.Ulimits...)
 	}
+	if t2.Description != "" {
+		t.Description = t2.Description
+	}
 	t.Inherits = append(t.Inherits, t2.Inherits...)
 }
@@ -863,6 +875,8 @@ func (t *Target) AddOverrides(overrides map[string]Override) error {
 			t.CacheTo = o.ArrValue
 		case "target":
 			t.Target = &value
+		case "call":
+			t.Call = &value
 		case "secrets":
 			t.Secrets = o.ArrValue
 		case "ssh":
@@ -1298,6 +1312,12 @@ func toBuildOpt(t *Target, inp *Input) (*build.Options, error) {
 		bo.Target = *t.Target
 	}
 
+	if t.Call != nil {
+		bo.PrintFunc = &build.PrintFunc{
+			Name: *t.Call,
+		}
+	}
+
 	cacheImports, err := buildflags.ParseCacheEntry(t.CacheFrom)
 	if err != nil {
 		return nil, err
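
With this change ParseFiles returns an extra *hclparser.ParseMeta next to the config, and a target-level `call` value flows into build.Options.PrintFunc. Below is a minimal, hypothetical consumer of the new signature; it is not part of this PR, assumes the buildx module is importable, and only touches fields shown in the diff above:

```go
package main

import (
	"fmt"
	"os"

	"github.com/docker/buildx/bake"
)

func main() {
	dt, err := os.ReadFile("docker-bake.hcl")
	if err != nil {
		panic(err)
	}
	// ParseFiles now returns (*Config, *hclparser.ParseMeta, error); the meta
	// value carries every variable block resolved during parsing.
	cfg, pm, err := bake.ParseFiles([]bake.File{{Name: "docker-bake.hcl", Data: dt}}, nil)
	if err != nil {
		panic(err)
	}
	fmt.Println("targets:", len(cfg.Targets))
	for _, v := range pm.AllVariables {
		val := "<null>" // same placeholder the new --list-variables table uses
		if v.Value != nil {
			val = *v.Value
		}
		fmt.Printf("%s=%s  # %s\n", v.Name, val, v.Description)
	}
}
```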


@@ -1528,7 +1528,7 @@ services:
       v2: "bar"
 `)
 
-	c, err := ParseFiles([]File{
+	c, _, err := ParseFiles([]File{
 		{Data: dt, Name: "c1.foo"},
 		{Data: dt2, Name: "c2.bar"},
 	}, nil)


@@ -273,7 +273,7 @@ func TestHCLMultiFileSharedVariables(t *testing.T) {
 	}
 	`)
 
-	c, err := ParseFiles([]File{
+	c, _, err := ParseFiles([]File{
 		{Data: dt, Name: "c1.hcl"},
 		{Data: dt2, Name: "c2.hcl"},
 	}, nil)
@@ -285,7 +285,7 @@ func TestHCLMultiFileSharedVariables(t *testing.T) {
 
 	t.Setenv("FOO", "def")
 
-	c, err = ParseFiles([]File{
+	c, _, err = ParseFiles([]File{
 		{Data: dt, Name: "c1.hcl"},
 		{Data: dt2, Name: "c2.hcl"},
 	}, nil)
@@ -322,7 +322,7 @@ func TestHCLVarsWithVars(t *testing.T) {
 	}
 	`)
 
-	c, err := ParseFiles([]File{
+	c, _, err := ParseFiles([]File{
 		{Data: dt, Name: "c1.hcl"},
 		{Data: dt2, Name: "c2.hcl"},
 	}, nil)
@@ -334,7 +334,7 @@ func TestHCLVarsWithVars(t *testing.T) {
 
 	t.Setenv("BASE", "new")
 
-	c, err = ParseFiles([]File{
+	c, _, err = ParseFiles([]File{
 		{Data: dt, Name: "c1.hcl"},
 		{Data: dt2, Name: "c2.hcl"},
 	}, nil)
@@ -612,7 +612,7 @@ func TestHCLMultiFileAttrs(t *testing.T) {
 		FOO="def"
 	`)
 
-	c, err := ParseFiles([]File{
+	c, _, err := ParseFiles([]File{
 		{Data: dt, Name: "c1.hcl"},
 		{Data: dt2, Name: "c2.hcl"},
 	}, nil)
@@ -623,7 +623,7 @@ func TestHCLMultiFileAttrs(t *testing.T) {
 
 	t.Setenv("FOO", "ghi")
 
-	c, err = ParseFiles([]File{
+	c, _, err = ParseFiles([]File{
 		{Data: dt, Name: "c1.hcl"},
 		{Data: dt2, Name: "c2.hcl"},
 	}, nil)
@@ -647,7 +647,7 @@ func TestHCLMultiFileGlobalAttrs(t *testing.T) {
 		FOO = "def"
 	`)
 
-	c, err := ParseFiles([]File{
+	c, _, err := ParseFiles([]File{
 		{Data: dt, Name: "c1.hcl"},
 		{Data: dt2, Name: "c2.hcl"},
 	}, nil)
@@ -830,7 +830,7 @@ func TestHCLRenameMultiFile(t *testing.T) {
 	}
 	`)
 
-	c, err := ParseFiles([]File{
+	c, _, err := ParseFiles([]File{
 		{Data: dt, Name: "c1.hcl"},
 		{Data: dt2, Name: "c2.hcl"},
 		{Data: dt3, Name: "c3.hcl"},
@@ -1050,7 +1050,7 @@ func TestHCLMatrixArgsOverride(t *testing.T) {
 	}
 	`)
 
-	c, err := ParseFiles([]File{
+	c, _, err := ParseFiles([]File{
 		{Data: dt, Name: "docker-bake.hcl"},
 	}, map[string]string{"ABC": "11,22,33"})
 	require.NoError(t, err)
@@ -1236,7 +1236,7 @@ services:
       v2: "bar"
 `)
 
-	c, err := ParseFiles([]File{
+	c, _, err := ParseFiles([]File{
 		{Data: dt, Name: "c1.hcl"},
 		{Data: dt2, Name: "c2.yml"},
 	}, nil)
@@ -1258,7 +1258,7 @@ func TestHCLBuiltinVars(t *testing.T) {
 	}
 	`)
 
-	c, err := ParseFiles([]File{
+	c, _, err := ParseFiles([]File{
 		{Data: dt, Name: "c1.hcl"},
 	}, map[string]string{
 		"BAKE_CMD_CONTEXT": "foo",
@@ -1272,7 +1272,7 @@ func TestHCLBuiltinVars(t *testing.T) {
 }
 
 func TestCombineHCLAndJSONTargets(t *testing.T) {
-	c, err := ParseFiles([]File{
+	c, _, err := ParseFiles([]File{
 		{
 			Name: "docker-bake.hcl",
 			Data: []byte(`
@@ -1348,7 +1348,7 @@ target "b" {
 }
 
 func TestCombineHCLAndJSONVars(t *testing.T) {
-	c, err := ParseFiles([]File{
+	c, _, err := ParseFiles([]File{
 		{
 			Name: "docker-bake.hcl",
 			Data: []byte(`


@@ -25,9 +25,11 @@ type Opt struct {
 }
 
 type variable struct {
 	Name    string         `json:"-" hcl:"name,label"`
 	Default *hcl.Attribute `json:"default,omitempty" hcl:"default,optional"`
-	Body    hcl.Body       `json:"-" hcl:",body"`
+	Description string         `json:"description,omitempty" hcl:"description,optional"`
+	Body        hcl.Body       `json:"-" hcl:",body"`
+	Remain      hcl.Body       `json:"-" hcl:",remain"`
 }
 
 type functionDef struct {
@@ -534,7 +536,18 @@ func (p *parser) resolveBlockNames(block *hcl.Block) ([]string, error) {
 	return names, nil
 }
 
-func Parse(b hcl.Body, opt Opt, val interface{}) (map[string]map[string][]string, hcl.Diagnostics) {
+type Variable struct {
+	Name        string
+	Description string
+	Value       *string
+}
+
+type ParseMeta struct {
+	Renamed      map[string]map[string][]string
+	AllVariables []*Variable
+}
+
+func Parse(b hcl.Body, opt Opt, val interface{}) (*ParseMeta, hcl.Diagnostics) {
 	reserved := map[string]struct{}{}
 	schema, _ := gohcl.ImpliedBodySchema(val)
@@ -643,6 +656,7 @@ func Parse(b hcl.Body, opt Opt, val interface{}) (map[string]map[string][]string
 		}
 	}
 
+	vars := make([]*Variable, 0, len(p.vars))
 	for k := range p.vars {
 		if err := p.resolveValue(p.ectx, k); err != nil {
 			if diags, ok := err.(hcl.Diagnostics); ok {
@@ -651,6 +665,21 @@ func Parse(b hcl.Body, opt Opt, val interface{}) (map[string]map[string][]string
 			r := p.vars[k].Body.MissingItemRange()
 			return nil, wrapErrorDiagnostic("Invalid value", err, &r, &r)
 		}
+		v := &Variable{
+			Name:        p.vars[k].Name,
+			Description: p.vars[k].Description,
+		}
+		if vv := p.ectx.Variables[k]; !vv.IsNull() {
+			var s string
+			switch vv.Type() {
+			case cty.String:
+				s = vv.AsString()
+			case cty.Bool:
+				s = strconv.FormatBool(vv.True())
+			}
+			v.Value = &s
+		}
+		vars = append(vars, v)
 	}
 
 	for k := range p.funcs {
@@ -795,7 +824,10 @@ func Parse(b hcl.Body, opt Opt, val interface{}) (map[string]map[string][]string
 		}
 	}
 
-	return renamed, nil
+	return &ParseMeta{
+		Renamed:      renamed,
+		AllVariables: vars,
+	}, nil
 }
 
 // wrapErrorDiagnostic wraps an error into a hcl.Diagnostics object.
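
The value stringification above only materialises string and bool values; any other non-null value falls through the switch and ends up as an empty string. A standalone sketch of that rule using go-cty directly (the helper name is illustrative, not part of the PR):

```go
package main

import (
	"fmt"
	"strconv"

	"github.com/zclconf/go-cty/cty"
)

// stringify mirrors the switch over vv.Type() in hclparser.Parse: strings are
// returned as-is, bools are formatted, everything else non-null becomes "".
func stringify(v cty.Value) *string {
	if v.IsNull() {
		return nil
	}
	var s string
	switch v.Type() {
	case cty.String:
		s = v.AsString()
	case cty.Bool:
		s = strconv.FormatBool(v.True())
	}
	return &s
}

func main() {
	for _, v := range []cty.Value{cty.StringVal("bar"), cty.True, cty.NullVal(cty.String)} {
		if s := stringify(v); s != nil {
			fmt.Println(*s)
		} else {
			fmt.Println("<null>") // same placeholder printVars uses for unset values
		}
	}
}
```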


@@ -111,21 +111,19 @@ func (mb mergedBodies) JustAttributes() (hcl.Attributes, hcl.Diagnostics) {
 			diags = append(diags, thisDiags...)
 		}
 
-		if thisAttrs != nil {
-			for name, attr := range thisAttrs {
-				if existing := attrs[name]; existing != nil {
-					diags = diags.Append(&hcl.Diagnostic{
-						Severity: hcl.DiagError,
-						Summary:  "Duplicate argument",
-						Detail: fmt.Sprintf(
-							"Argument %q was already set at %s",
-							name, existing.NameRange.String(),
-						),
-						Subject: thisAttrs[name].NameRange.Ptr(),
-					})
-				}
-				attrs[name] = attr
+		for name, attr := range thisAttrs {
+			if existing := attrs[name]; existing != nil {
+				diags = diags.Append(&hcl.Diagnostic{
+					Severity: hcl.DiagError,
+					Summary:  "Duplicate argument",
+					Detail: fmt.Sprintf(
+						"Argument %q was already set at %s",
+						name, existing.NameRange.String(),
+					),
+					Subject: thisAttrs[name].NameRange.Ptr(),
+				})
 			}
+			attrs[name] = attr
 		}
 	}
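
My reading of the dropped `if thisAttrs != nil` guard: ranging over a nil map in Go performs zero iterations, so the wrapper added nothing. A tiny standalone illustration:

```go
package main

import "fmt"

func main() {
	var attrs map[string]int // nil map, e.g. when a body contributed no attributes

	// Ranging over a nil map is a no-op; no explicit nil check is needed.
	for name, v := range attrs {
		fmt.Println(name, v) // never reached
	}
	fmt.Println("iterations:", len(attrs)) // iterations: 0
}
```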


@@ -1,20 +1,27 @@
 package commands
 
 import (
+	"bytes"
+	"cmp"
 	"context"
 	"encoding/json"
 	"fmt"
 	"io"
 	"os"
+	"slices"
 	"strings"
+	"text/tabwriter"
 
 	"github.com/containerd/console"
 	"github.com/containerd/platforms"
 	"github.com/docker/buildx/bake"
+	"github.com/docker/buildx/bake/hclparser"
 	"github.com/docker/buildx/build"
 	"github.com/docker/buildx/builder"
+	"github.com/docker/buildx/controller/pb"
 	"github.com/docker/buildx/localstate"
 	"github.com/docker/buildx/util/buildflags"
+	"github.com/docker/buildx/util/cobrautil"
 	"github.com/docker/buildx/util/cobrautil/completion"
 	"github.com/docker/buildx/util/confutil"
 	"github.com/docker/buildx/util/desktop"
@@ -30,16 +37,19 @@ import (
 )
 
 type bakeOptions struct {
 	files     []string
 	overrides []string
 	printOnly bool
-	sbom       string
-	provenance string
+	listTargets bool
+	listVars    bool
+	sbom        string
+	provenance  string
 
 	builder      string
 	metadataFile string
 	exportPush   bool
 	exportLoad   bool
+	callFunc     string
 }
 
 func runBake(ctx context.Context, dockerCli command.Cli, targets []string, in bakeOptions, cFlags commonFlags) (err error) {
@@ -71,6 +81,11 @@ func runBake(ctx context.Context, dockerCli command.Cli, targets []string, in ba
 		targets = []string{"default"}
 	}
 
+	callFunc, err := buildflags.ParsePrintFunc(in.callFunc)
+	if err != nil {
+		return err
+	}
+
 	overrides := in.overrides
 	if in.exportPush {
 		overrides = append(overrides, "*.push=true")
@@ -78,6 +93,9 @@ func runBake(ctx context.Context, dockerCli command.Cli, targets []string, in ba
 	if in.exportLoad {
 		overrides = append(overrides, "*.load=true")
 	}
+	if callFunc != nil {
+		overrides = append(overrides, fmt.Sprintf("*.call=%s", callFunc.Name))
+	}
 	if cFlags.noCache != nil {
 		overrides = append(overrides, fmt.Sprintf("*.no-cache=%t", *cFlags.noCache))
 	}
@@ -171,12 +189,32 @@ func runBake(ctx context.Context, dockerCli command.Cli, targets []string, in ba
 		return errors.New("couldn't find a bake definition")
 	}
 
-	tgts, grps, err := bake.ReadTargets(ctx, files, targets, overrides, map[string]string{
+	defaults := map[string]string{
 		// don't forget to update documentation if you add a new
 		// built-in variable: docs/bake-reference.md#built-in-variables
 		"BAKE_CMD_CONTEXT":    cmdContext,
 		"BAKE_LOCAL_PLATFORM": platforms.Format(platforms.DefaultSpec()),
-	})
+	}
+
+	if in.listTargets || in.listVars {
+		cfg, pm, err := bake.ParseFiles(files, defaults)
+		if err != nil {
+			return err
+		}
+		err = printer.Wait()
+		printer = nil
+		if err != nil {
+			return err
+		}
+		if in.listTargets {
+			return printTargetList(dockerCli.Out(), cfg)
+		} else if in.listVars {
+			return printVars(dockerCli.Out(), pm.AllVariables)
+		}
+	}
+
+	tgts, grps, err := bake.ReadTargets(ctx, files, targets, overrides, defaults)
 	if err != nil {
 		return err
 	}
@@ -222,6 +260,16 @@ func runBake(ctx context.Context, dockerCli command.Cli, targets []string, in ba
 		return nil
 	}
 
+	for _, opt := range bo {
+		if opt.PrintFunc != nil {
+			cf, err := buildflags.ParsePrintFunc(opt.PrintFunc.Name)
+			if err != nil {
+				return err
+			}
+			opt.PrintFunc.Name = cf.Name
+		}
+	}
+
 	prm := confutil.MetadataProvenance()
 	if len(in.metadataFile) == 0 {
 		prm = confutil.MetadataProvenanceModeDisabled
@@ -254,7 +302,117 @@ func runBake(ctx context.Context, dockerCli command.Cli, targets []string, in ba
 		return wrapBuildError(err, true)
 	}
 
-	return
+	err = printer.Wait()
+	if err != nil {
+		return err
+	}
+
+	var callFormatJSON bool
+	var jsonResults = map[string]map[string]any{}
+	if callFunc != nil {
+		callFormatJSON = callFunc.Format == "json"
+	}
+	var sep bool
+	var exitCode int
+	names := make([]string, 0, len(bo))
+	for name := range bo {
+		names = append(names, name)
+	}
+	slices.Sort(names)
+	for _, name := range names {
+		req := bo[name]
+		if req.PrintFunc == nil {
+			continue
+		}
+		pf := &pb.PrintFunc{
+			Name:         req.PrintFunc.Name,
+			Format:       req.PrintFunc.Format,
+			IgnoreStatus: req.PrintFunc.IgnoreStatus,
+		}
+		if callFunc != nil {
+			pf.Format = callFunc.Format
+			pf.IgnoreStatus = callFunc.IgnoreStatus
+		}
+		var res map[string]string
+		if sp, ok := resp[name]; ok {
+			res = sp.ExporterResponse
+		}
+		if callFormatJSON {
+			jsonResults[name] = map[string]any{}
+			buf := &bytes.Buffer{}
+			if code, err := printResult(buf, pf, res); err != nil {
+				jsonResults[name]["error"] = err.Error()
+				exitCode = 1
+			} else if code != 0 && exitCode == 0 {
+				exitCode = code
+			}
+			m := map[string]*json.RawMessage{}
+			if err := json.Unmarshal(buf.Bytes(), &m); err == nil {
+				for k, v := range m {
+					jsonResults[name][k] = v
+				}
+			} else {
+				jsonResults[name][pf.Name] = json.RawMessage(buf.Bytes())
+			}
+		} else {
+			if sep {
+				fmt.Fprintln(dockerCli.Out())
+			} else {
+				sep = true
+			}
+			fmt.Fprintf(dockerCli.Out(), "%s\n", name)
+			if descr := tgts[name].Description; descr != "" {
+				fmt.Fprintf(dockerCli.Out(), "%s\n", descr)
+			}
+			fmt.Fprintln(dockerCli.Out())
+			if code, err := printResult(dockerCli.Out(), pf, res); err != nil {
+				fmt.Fprintf(dockerCli.Out(), "error: %v\n", err)
+				exitCode = 1
+			} else if code != 0 && exitCode == 0 {
+				exitCode = code
+			}
+		}
+	}
+	if callFormatJSON {
+		out := struct {
+			Group  map[string]*bake.Group    `json:"group,omitempty"`
+			Target map[string]map[string]any `json:"target"`
+		}{
+			Group:  grps,
+			Target: map[string]map[string]any{},
+		}
+		for name, def := range tgts {
+			out.Target[name] = map[string]any{
+				"build": def,
+			}
+			if res, ok := jsonResults[name]; ok {
+				printName := bo[name].PrintFunc.Name
+				if printName == "lint" {
+					printName = "check"
+				}
+				out.Target[name][printName] = res
+			}
+		}
+		dt, err := json.MarshalIndent(out, "", " ")
+		if err != nil {
+			return err
+		}
+		fmt.Fprintln(dockerCli.Out(), string(dt))
+	}
+	if exitCode != 0 {
+		os.Exit(exitCode)
+	}
+	return nil
 }
 
 func bakeCmd(dockerCli command.Cli, rootOpts *rootOptions) *cobra.Command {
@@ -290,6 +448,18 @@ func bakeCmd(dockerCli command.Cli, rootOpts *rootOptions) *cobra.Command {
 	flags.StringVar(&options.sbom, "sbom", "", `Shorthand for "--set=*.attest=type=sbom"`)
 	flags.StringVar(&options.provenance, "provenance", "", `Shorthand for "--set=*.attest=type=provenance"`)
 	flags.StringArrayVar(&options.overrides, "set", nil, `Override target value (e.g., "targetpattern.key=value")`)
+	flags.StringVar(&options.callFunc, "call", "build", `Set method for evaluating build ("check", "outline", "targets")`)
+	flags.VarPF(callAlias(&options.callFunc, "check"), "check", "", `Shorthand for "--call=check"`)
+	flags.Lookup("check").NoOptDefVal = "true"
+
+	flags.BoolVar(&options.listTargets, "list-targets", false, "List available targets")
+	cobrautil.MarkFlagsExperimental(flags, "list-targets")
+	flags.MarkHidden("list-targets")
+
+	flags.BoolVar(&options.listVars, "list-variables", false, "List defined variables")
+	cobrautil.MarkFlagsExperimental(flags, "list-variables")
+	flags.MarkHidden("list-variables")
 
 	commonBuildFlags(&cFlags, flags)
@@ -346,3 +516,75 @@ func readBakeFiles(ctx context.Context, nodes []builder.Node, url string, names
 	return
 }
+
+func printVars(w io.Writer, vars []*hclparser.Variable) error {
+	slices.SortFunc(vars, func(a, b *hclparser.Variable) int {
+		return cmp.Compare(a.Name, b.Name)
+	})
+	tw := tabwriter.NewWriter(w, 1, 8, 1, '\t', 0)
+	defer tw.Flush()
+	tw.Write([]byte("VARIABLE\tVALUE\tDESCRIPTION\n"))
+	for _, v := range vars {
+		var value string
+		if v.Value != nil {
+			value = *v.Value
+		} else {
+			value = "<null>"
+		}
+		fmt.Fprintf(tw, "%s\t%s\t%s\n", v.Name, value, v.Description)
+	}
+	return nil
+}
+
+func printTargetList(w io.Writer, cfg *bake.Config) error {
+	tw := tabwriter.NewWriter(w, 1, 8, 1, '\t', 0)
+	defer tw.Flush()
+	tw.Write([]byte("TARGET\tDESCRIPTION\n"))
+
+	type targetOrGroup struct {
+		name   string
+		target *bake.Target
+		group  *bake.Group
+	}
+
+	list := make([]targetOrGroup, 0, len(cfg.Targets)+len(cfg.Groups))
+	for _, tgt := range cfg.Targets {
+		list = append(list, targetOrGroup{name: tgt.Name, target: tgt})
+	}
+	for _, grp := range cfg.Groups {
+		list = append(list, targetOrGroup{name: grp.Name, group: grp})
+	}
+
+	slices.SortFunc(list, func(a, b targetOrGroup) int {
+		return cmp.Compare(a.name, b.name)
+	})
+
+	for _, tgt := range list {
+		if strings.HasPrefix(tgt.name, "_") {
+			// convention for a private target
+			continue
+		}
+		var descr string
+		if tgt.target != nil {
+			descr = tgt.target.Description
+		} else if tgt.group != nil {
+			descr = tgt.group.Description
+			if len(tgt.group.Targets) > 0 {
+				slices.Sort(tgt.group.Targets)
+				names := strings.Join(tgt.group.Targets, ", ")
+				if descr != "" {
+					descr += " (" + names + ")"
+				} else {
+					descr = names
+				}
+			}
+		}
+		fmt.Fprintf(tw, "%s\t%s\n", tgt.name, descr)
+	}
+	return nil
+}
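
For orientation, a sketch of the document shape the JSON branch above emits for `--call check,format=json`: a `group` map plus a `target` map whose entries hold the resolved `build` definition and the per-call result (lint results are keyed as `check`). The concrete names and warning fields in the sample below are invented; only the nesting mirrors the code:

```go
package main

import (
	"encoding/json"
	"fmt"
)

// bakeCallOutput mirrors the anonymous `out` struct in runBake.
type bakeCallOutput struct {
	Group  map[string]any            `json:"group,omitempty"`
	Target map[string]map[string]any `json:"target"`
}

func main() {
	// Illustrative payload only; real output depends on the bake definition.
	sample := []byte(`{
	  "group":  { "default": { "targets": ["build", "another"] } },
	  "target": {
	    "build": {
	      "build": { "dockerfile": "a.Dockerfile" },
	      "check": { "warnings": [ { "ruleName": "ConsistentInstructionCasing" } ] }
	    }
	  }
	}`)

	var out bakeCallOutput
	if err := json.Unmarshal(sample, &out); err != nil {
		panic(err)
	}
	fmt.Println("targets with results:", len(out.Target))
}
```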


@@ -9,7 +9,6 @@ import (
 	"encoding/json"
 	"fmt"
 	"io"
-	"log"
 	"os"
 	"path/filepath"
 	"strconv"
@@ -369,8 +368,10 @@ func runBuild(ctx context.Context, dockerCli command.Cli, options buildOptions)
 		}
 	}
 	if opts.PrintFunc != nil {
-		if err := printResult(opts.PrintFunc, resp.ExporterResponse); err != nil {
+		if exitcode, err := printResult(dockerCli.Out(), opts.PrintFunc, resp.ExporterResponse); err != nil {
 			return err
+		} else if exitcode != 0 {
+			os.Exit(exitcode)
 		}
 	} else if options.metadataFile != "" {
 		dt := decodeExporterResponse(resp.ExporterResponse)
@@ -634,7 +635,7 @@ func buildCmd(dockerCli command.Cli, rootOpts *rootOptions, debugConfig *debug.D
 	}
 
 	flags.StringVar(&options.printFunc, "call", "build", `Set method for evaluating build ("check", "outline", "targets")`)
-	flags.VarPF(callAlias(options, "check"), "check", "", `Shorthand for "--call=check"`)
+	flags.VarPF(callAlias(&options.printFunc, "check"), "check", "", `Shorthand for "--call=check"`)
 	flags.Lookup("check").NoOptDefVal = "true"
 
 	// hidden flags
@@ -862,24 +863,24 @@ func printWarnings(w io.Writer, warnings []client.VertexWarning, mode progressui
 	}
 }
 
-func printResult(f *controllerapi.PrintFunc, res map[string]string) error {
+func printResult(w io.Writer, f *controllerapi.PrintFunc, res map[string]string) (int, error) {
 	switch f.Name {
 	case "outline":
-		return printValue(outline.PrintOutline, outline.SubrequestsOutlineDefinition.Version, f.Format, res)
+		return 0, printValue(w, outline.PrintOutline, outline.SubrequestsOutlineDefinition.Version, f.Format, res)
 	case "targets":
-		return printValue(targets.PrintTargets, targets.SubrequestsTargetsDefinition.Version, f.Format, res)
+		return 0, printValue(w, targets.PrintTargets, targets.SubrequestsTargetsDefinition.Version, f.Format, res)
 	case "subrequests.describe":
-		return printValue(subrequests.PrintDescribe, subrequests.SubrequestsDescribeDefinition.Version, f.Format, res)
+		return 0, printValue(w, subrequests.PrintDescribe, subrequests.SubrequestsDescribeDefinition.Version, f.Format, res)
 	case "lint":
-		err := printValue(lint.PrintLintViolations, lint.SubrequestLintDefinition.Version, f.Format, res)
+		err := printValue(w, lint.PrintLintViolations, lint.SubrequestLintDefinition.Version, f.Format, res)
 		if err != nil {
-			return err
+			return 0, err
 		}
 		lintResults := lint.LintResults{}
 		if result, ok := res["result.json"]; ok {
 			if err := json.Unmarshal([]byte(result), &lintResults); err != nil {
-				return err
+				return 0, err
 			}
 		}
 		if lintResults.Error != nil {
@@ -889,52 +890,51 @@ func printResult(f *controllerapi.PrintFunc, res map[string]string) error {
 			// but here we want to print the error in a way that's consistent with how
 			// the lint warnings are printed via the `lint.PrintLintViolations` function,
 			// which differs from the default error printing.
-			fmt.Println()
-			lintBuf := bytes.NewBuffer([]byte(lintResults.Error.Message))
-			if f.Format != "json" {
-				fmt.Fprintln(lintBuf)
+			if f.Format != "json" && len(lintResults.Warnings) > 0 {
+				fmt.Fprintln(w)
 			}
+			lintBuf := bytes.NewBuffer([]byte(lintResults.Error.Message + "\n"))
 			sourceInfo := lintResults.Sources[lintResults.Error.Location.SourceIndex]
 			source := errdefs.Source{
 				Info:   sourceInfo,
 				Ranges: lintResults.Error.Location.Ranges,
 			}
 			source.Print(lintBuf)
-			return errors.New(lintBuf.String())
+			return 0, errors.New(lintBuf.String())
 		} else if len(lintResults.Warnings) == 0 && f.Format != "json" {
-			fmt.Println("Check complete, no warnings found.")
+			fmt.Fprintln(w, "Check complete, no warnings found.")
 		}
 	default:
 		if dt, ok := res["result.json"]; ok && f.Format == "json" {
-			fmt.Println(dt)
+			fmt.Fprintln(w, dt)
 		} else if dt, ok := res["result.txt"]; ok {
-			fmt.Print(dt)
+			fmt.Fprint(w, dt)
 		} else {
-			log.Printf("%s %+v", f, res)
+			fmt.Fprintf(w, "%s %+v\n", f, res)
 		}
 	}
 	if v, ok := res["result.statuscode"]; !f.IgnoreStatus && ok {
 		if n, err := strconv.Atoi(v); err == nil && n != 0 {
-			os.Exit(n)
+			return n, nil
 		}
 	}
-	return nil
+	return 0, nil
 }
 
 type printFunc func([]byte, io.Writer) error
 
-func printValue(printer printFunc, version string, format string, res map[string]string) error {
+func printValue(w io.Writer, printer printFunc, version string, format string, res map[string]string) error {
 	if format == "json" {
-		fmt.Fprintln(os.Stdout, res["result.json"])
+		fmt.Fprintln(w, res["result.json"])
 		return nil
 	}
 	if res["version"] != "" && versions.LessThan(version, res["version"]) && res["result.txt"] != "" {
 		// structure is too new and we don't know how to print it
-		fmt.Fprint(os.Stdout, res["result.txt"])
+		fmt.Fprint(w, res["result.txt"])
 		return nil
 	}
-	return printer([]byte(res["result.json"]), os.Stdout)
+	return printer([]byte(res["result.json"]), w)
 }
 
 type invokeConfig struct {
@@ -1042,7 +1042,7 @@ func maybeJSONArray(v string) []string {
 	return []string{v}
 }
 
-func callAlias(options *buildOptions, value string) cobrautil.BoolFuncValue {
+func callAlias(target *string, value string) cobrautil.BoolFuncValue {
 	return func(s string) error {
 		v, err := strconv.ParseBool(s)
 		if err != nil {
@@ -1050,7 +1050,7 @@ func callAlias(options *buildOptions, value string) cobrautil.BoolFuncValue {
 			return err
 		}
 		if v {
-			options.printFunc = value
+			*target = value
 		}
 		return nil
 	}
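
The `--check` flag works through pflag's `NoOptDefVal`: a bare `--check` behaves like `--check=true` and rewrites the `--call` method via the generalized callAlias. A self-contained sketch of that pattern using spf13/pflag directly; the value type below is a minimal stand-in for `cobrautil.BoolFuncValue`, not buildx's implementation:

```go
package main

import (
	"fmt"
	"strconv"

	"github.com/spf13/pflag"
)

// boolFuncValue adapts a func(string) error to the pflag.Value interface.
type boolFuncValue func(string) error

func (f boolFuncValue) Set(s string) error { return f(s) }
func (f boolFuncValue) String() string     { return "" }
func (f boolFuncValue) Type() string       { return "bool" }

// callAliasDemo mirrors callAlias: only a truthy value rewrites the target.
func callAliasDemo(target *string, value string) boolFuncValue {
	return func(s string) error {
		v, err := strconv.ParseBool(s)
		if err != nil {
			return err
		}
		if v {
			*target = value
		}
		return nil
	}
}

func main() {
	var callFunc string
	fs := pflag.NewFlagSet("demo", pflag.ContinueOnError)
	fs.StringVar(&callFunc, "call", "build", "method for evaluating build")
	f := fs.VarPF(callAliasDemo(&callFunc, "check"), "check", "", "shorthand for --call=check")
	f.NoOptDefVal = "true" // a bare --check is treated as --check=true

	_ = fs.Parse([]string{"--check"})
	fmt.Println(callFunc) // check
}
```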


@@ -16,6 +16,8 @@ Build from a file
 | Name                                | Type          | Default | Description                                                      |
 |:------------------------------------|:--------------|:--------|:-----------------------------------------------------------------|
 | [`--builder`](#builder)             | `string`      |         | Override the configured builder instance                         |
+| `--call`                            | `string`      | `build` | Set method for evaluating build (`check`, `outline`, `targets`)  |
+| `--check`                           | `bool`        |         | Shorthand for `--call=check`                                      |
 | [`-f`](#file), [`--file`](#file)    | `stringArray` |         | Build definition file                                             |
 | `--load`                            | `bool`        |         | Shorthand for `--set=*.output=type=docker`                        |
 | [`--metadata-file`](#metadata-file) | `string`      |         | Write build result metadata to a file                             |


@@ -5,6 +5,7 @@ import (
 	"fmt"
 	"os"
 	"path/filepath"
+	"strings"
 	"testing"
 
 	"github.com/containerd/continuity/fs/fstest"
@@ -48,6 +49,10 @@ var bakeTests = []func(t *testing.T, sb integration.Sandbox){
 	testBakeMetadataWarningsDedup,
 	testBakeMultiExporters,
 	testBakeLoadPush,
+	testListTargets,
+	testListVariables,
+	testBakeCallCheck,
+	testBakeCallCheckFlag,
 }
 
 func testBakeLocal(t *testing.T, sb integration.Sandbox) {
@@ -951,3 +956,163 @@ target "default" {
 	// TODO: test metadata file when supported by multi exporters https://github.com/docker/buildx/issues/2181
 }
+
+func testListTargets(t *testing.T, sb integration.Sandbox) {
+	bakefile := []byte(`
+target "foo" {
+	description = "This builds foo"
+}
+target "abc" {
+}
+`)
+	dir := tmpdir(
+		t,
+		fstest.CreateFile("docker-bake.hcl", bakefile, 0600),
+	)
+
+	out, err := bakeCmd(
+		sb,
+		withDir(dir),
+		withArgs("--list-targets"),
+	)
+	require.NoError(t, err, out)
+
+	require.Equal(t, "TARGET\tDESCRIPTION\nabc\t\nfoo\tThis builds foo", strings.TrimSpace(out))
+}
+
+func testListVariables(t *testing.T, sb integration.Sandbox) {
+	bakefile := []byte(`
+variable "foo" {
+	default = "bar"
+	description = "This is foo"
+}
+variable "abc" {
+	default = null
+}
+variable "def" {
+}
+target "default" {
+}
+`)
+	dir := tmpdir(
+		t,
+		fstest.CreateFile("docker-bake.hcl", bakefile, 0600),
+	)
+
+	out, err := bakeCmd(
+		sb,
+		withDir(dir),
+		withArgs("--list-variables"),
+	)
+	require.NoError(t, err, out)
+
+	require.Equal(t, "VARIABLE\tVALUE\tDESCRIPTION\nabc\t\t<null>\t\ndef\t\t\t\nfoo\t\tbar\tThis is foo", strings.TrimSpace(out))
+}
+
+func testBakeCallCheck(t *testing.T, sb integration.Sandbox) {
+	dockerfile := []byte(`
+FROM scratch
+COPy foo /foo
+`)
+	bakefile := []byte(`
+target "validate" {
+	call = "check"
+}
+`)
+	dir := tmpdir(
+		t,
+		fstest.CreateFile("docker-bake.hcl", bakefile, 0600),
+		fstest.CreateFile("Dockerfile", dockerfile, 0600),
+	)
+
+	out, err := bakeCmd(
+		sb,
+		withDir(dir),
+		withArgs("validate"),
+	)
+	require.Error(t, err, out)
+
+	require.Contains(t, out, "validate")
+	require.Contains(t, out, "ConsistentInstructionCasing")
+}
+
+func testBakeCallCheckFlag(t *testing.T, sb integration.Sandbox) {
+	dockerfile := []byte(`
+FROM scratch
+COPy foo /foo
+`)
+	dockerfile2 := []byte(`
+FROM scratch
+COPY foo$BAR /foo
+`)
+	bakefile := []byte(`
+target "build" {
+	dockerfile = "a.Dockerfile"
+}
+target "another" {
+	dockerfile = "b.Dockerfile"
+}
+`)
+	dir := tmpdir(
+		t,
+		fstest.CreateFile("docker-bake.hcl", bakefile, 0600),
+		fstest.CreateFile("a.Dockerfile", dockerfile, 0600),
+		fstest.CreateFile("b.Dockerfile", dockerfile2, 0600),
+	)
+
+	out, err := bakeCmd(
+		sb,
+		withDir(dir),
+		withArgs("build", "another", "--check"),
+	)
+	require.Error(t, err, out)
+
+	require.Contains(t, out, "build")
+	require.Contains(t, out, "ConsistentInstructionCasing")
+
+	require.Contains(t, out, "another")
+	require.Contains(t, out, "UndefinedVar")
+
+	out, err = bakeCmd(
+		sb,
+		withDir(dir),
+		withArgs("build", "another", "--call", "check,format=json"),
+	)
+	require.Error(t, err, out)
+
+	var res map[string]any
+	err = json.Unmarshal([]byte(out), &res)
+	require.NoError(t, err, out)
+
+	targets, ok := res["target"].(map[string]any)
+	require.True(t, ok)
+
+	build, ok := targets["build"].(map[string]any)
+	require.True(t, ok)
+	_, ok = build["build"]
+	require.True(t, ok)
+	check, ok := build["check"].(map[string]any)
+	require.True(t, ok)
+	warnings, ok := check["warnings"].([]any)
+	require.True(t, ok)
+	require.Len(t, warnings, 1)
+
+	another, ok := targets["another"].(map[string]any)
+	require.True(t, ok)
+	_, ok = another["build"]
+	require.True(t, ok)
+	check, ok = another["check"].(map[string]any)
+	require.True(t, ok)
+	warnings, ok = check["warnings"].([]any)
+	require.True(t, ok)
+	require.Len(t, warnings, 1)
+}


@@ -19,9 +19,10 @@ import (
 type Printer struct {
 	status chan *client.SolveStatus
 
 	ready  chan struct{}
 	done   chan struct{}
 	paused chan struct{}
+	closeOnce sync.Once
 
 	err      error
 	warnings []client.VertexWarning
@@ -36,8 +37,10 @@ type Printer struct {
 }
 
 func (p *Printer) Wait() error {
-	close(p.status)
-	<-p.done
+	p.closeOnce.Do(func() {
+		close(p.status)
+		<-p.done
+	})
 	return p.err
 }
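
Wrapping the shutdown in `sync.Once` makes `Wait` idempotent, which matters now that bake may call it once early (for `--list-*`) and again before printing call results. A minimal illustration of the pattern, independent of the Printer type (names are illustrative):

```go
package main

import (
	"fmt"
	"sync"
)

type waiter struct {
	status    chan struct{}
	done      chan struct{}
	closeOnce sync.Once
	err       error
}

func (w *waiter) Wait() error {
	w.closeOnce.Do(func() {
		close(w.status) // closing twice without the Once would panic
		<-w.done
	})
	return w.err
}

func main() {
	w := &waiter{status: make(chan struct{}), done: make(chan struct{})}
	go func() {
		<-w.status // observe the shutdown signal
		close(w.done)
	}()
	fmt.Println(w.Wait(), w.Wait()) // safe to call repeatedly: <nil> <nil>
}
```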