Compare commits

..

9 Commits

Author SHA1 Message Date
Tõnis Tiigi
245093b99a Merge pull request #2945 from tonistiigi/v0.20.1-cherry-picks
[v0.20.1] cherry picks
2025-01-22 13:41:17 -08:00
CrazyMax
e2ed15f0c9 ci: use main branch for docs upstream validation workflow
Signed-off-by: CrazyMax <1951866+crazy-max@users.noreply.github.com>
(cherry picked from commit aa1fbc0421)
2025-01-22 13:11:38 -08:00
David Karlsson
fd442f8e10 docs: add docs for bake --allow
Signed-off-by: David Karlsson <35727626+dvdksn@users.noreply.github.com>
(cherry picked from commit 012df71b63)
2025-01-22 13:10:12 -08:00
David Karlsson
1002e6fb42 docs(bake): improve docs on "call" and "description" in bake file
Signed-off-by: David Karlsson <35727626+dvdksn@users.noreply.github.com>
(cherry picked from commit a26bb271ab)
2025-01-22 13:09:58 -08:00
Jonathan A. Sternberg
d5ad869033 buildflags: fix ref only format for command line and bake
Signed-off-by: Jonathan A. Sternberg <jonathan.sternberg@docker.com>
(cherry picked from commit 11c84973ef)
2025-01-22 13:00:20 -08:00
Tõnis Tiigi
bd7090b981 Merge pull request #2940 from crazy-max/0.20.1-picks
[v0.20] v0.20.1 cherry-picks
2025-01-22 09:03:22 -08:00
Jonathan A. Sternberg
24f3a1df80 buildflags: marshal attestations into json with extra attributes correctly
`MarshalJSON` would not include the extra attributes because it iterated
over the target map rather than the source map.

Also fixes JSON unmarshaling for SSH and secrets. The intention was to
unmarshal into the struct, but `UnmarshalText` takes priority over the
default struct unmarshaling so it didn't work as intended.

Tests have been added for all marshaling and unmarshaling methods.

Signed-off-by: Jonathan A. Sternberg <jonathan.sternberg@docker.com>
2025-01-22 13:43:36 +01:00
CrazyMax
8e30c4669c Merge pull request #2933 from crazy-max/v0.20_backport_buildkit-0.19.0
[v0.20 backport] vendor: update buildkit to v0.19.0
2025-01-20 19:29:35 +01:00
CrazyMax
cf74356afc vendor: update buildkit to v0.19.0
Signed-off-by: CrazyMax <1951866+crazy-max@users.noreply.github.com>
2025-01-20 18:56:37 +01:00
929 changed files with 40641 additions and 56523 deletions

View File

@@ -54,9 +54,9 @@ jobs:
- master - master
- latest - latest
- buildx-stable-1 - buildx-stable-1
- v0.20.2 - v0.19.0-rc2
- v0.19.0
- v0.18.2 - v0.18.2
- v0.17.2
worker: worker:
- docker-container - docker-container
- remote - remote
@@ -76,16 +76,6 @@ jobs:
- worker: docker+containerd # same as docker, but with containerd snapshotter - worker: docker+containerd # same as docker, but with containerd snapshotter
pkg: ./tests pkg: ./tests
mode: experimental mode: experimental
- worker: "docker@27.5"
pkg: ./tests
- worker: "docker+containerd@27.5" # same as docker, but with containerd snapshotter
pkg: ./tests
- worker: "docker@27.5"
pkg: ./tests
mode: experimental
- worker: "docker+containerd@27.5" # same as docker, but with containerd snapshotter
pkg: ./tests
mode: experimental
- worker: "docker@26.1" - worker: "docker@26.1"
pkg: ./tests pkg: ./tests
- worker: "docker+containerd@26.1" # same as docker, but with containerd snapshotter - worker: "docker+containerd@26.1" # same as docker, but with containerd snapshotter
@@ -184,11 +174,6 @@ jobs:
env: env:
SKIP_INTEGRATION_TESTS: 1 SKIP_INTEGRATION_TESTS: 1
steps: steps:
-
name: Setup Git config
run: |
git config --global core.autocrlf false
git config --global core.eol lf
- -
name: Checkout name: Checkout
uses: actions/checkout@v4 uses: actions/checkout@v4
@@ -258,17 +243,12 @@ jobs:
matrix: matrix:
os: os:
- freebsd - freebsd
- netbsd
- openbsd - openbsd
steps: steps:
- -
name: Prepare name: Prepare
run: | run: |
echo "VAGRANT_FILE=hack/Vagrantfile.${{ matrix.os }}" >> $GITHUB_ENV echo "VAGRANT_FILE=hack/Vagrantfile.${{ matrix.os }}" >> $GITHUB_ENV
# Sets semver Go version to be able to download tarball during vagrant setup
goVersion=$(curl --silent "https://go.dev/dl/?mode=json&include=all" | jq -r '.[].files[].version' | uniq | sed -e 's/go//' | sort -V | grep $GO_VERSION | tail -1)
echo "GO_VERSION=$goVersion" >> $GITHUB_ENV
- -
name: Checkout name: Checkout
uses: actions/checkout@v4 uses: actions/checkout@v4
@@ -284,10 +264,8 @@ jobs:
name: Install vagrant name: Install vagrant
run: | run: |
set -x set -x
wget -O - https://apt.releases.hashicorp.com/gpg | sudo gpg --dearmor -o /usr/share/keyrings/hashicorp-archive-keyring.gpg
echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/hashicorp-archive-keyring.gpg] https://apt.releases.hashicorp.com $(lsb_release -cs) main" | sudo tee /etc/apt/sources.list.d/hashicorp.list
sudo apt-get update sudo apt-get update
sudo apt-get install -y libvirt-dev libvirt-daemon libvirt-daemon-system vagrant vagrant-libvirt ruby-libvirt sudo apt-get install -y libvirt-daemon libvirt-daemon-system vagrant vagrant-libvirt ruby-libvirt
sudo systemctl enable --now libvirtd sudo systemctl enable --now libvirtd
sudo chmod a+rw /var/run/libvirt/libvirt-sock sudo chmod a+rw /var/run/libvirt/libvirt-sock
vagrant plugin install vagrant-libvirt vagrant plugin install vagrant-libvirt
@@ -411,15 +389,6 @@ jobs:
- test-unit - test-unit
if: ${{ github.event_name != 'pull_request' && github.repository == 'docker/buildx' }} if: ${{ github.event_name != 'pull_request' && github.repository == 'docker/buildx' }}
steps: steps:
-
name: Free disk space
uses: jlumbroso/free-disk-space@54081f138730dfa15788a46383842cd2f914a1be # v1.3.1
with:
android: true
dotnet: true
haskell: true
large-packages: true
swap-storage: true
- -
name: Set up QEMU name: Set up QEMU
uses: docker/setup-qemu-action@v3 uses: docker/setup-qemu-action@v3

View File

@@ -77,7 +77,7 @@ jobs:
VENDOR_MODULE: github.com/docker/buildx@${{ env.RELEASE_NAME }} VENDOR_MODULE: github.com/docker/buildx@${{ env.RELEASE_NAME }}
- -
name: Create PR on docs repo name: Create PR on docs repo
uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8 uses: peter-evans/create-pull-request@67ccf781d68cd99b580ae25a5c18a1cc84ffff1f # v7.0.6
with: with:
token: ${{ secrets.GHPAT_DOCS_DISPATCH }} token: ${{ secrets.GHPAT_DOCS_DISPATCH }}
push-to-fork: docker-tools-robot/docker.github.io push-to-fork: docker-tools-robot/docker.github.io

View File

@@ -29,7 +29,7 @@ env:
SETUP_BUILDX_VERSION: "edge" SETUP_BUILDX_VERSION: "edge"
SETUP_BUILDKIT_IMAGE: "moby/buildkit:latest" SETUP_BUILDKIT_IMAGE: "moby/buildkit:latest"
DESTDIR: "./bin" DESTDIR: "./bin"
K3S_VERSION: "v1.32.2+k3s1" K3S_VERSION: "v1.21.2-k3s1"
jobs: jobs:
build: build:
@@ -65,7 +65,7 @@ jobs:
retention-days: 7 retention-days: 7
driver: driver:
runs-on: ubuntu-24.04 runs-on: ubuntu-20.04
needs: needs:
- build - build
strategy: strategy:
@@ -153,7 +153,7 @@ jobs:
- -
name: Install k3s name: Install k3s
if: matrix.driver == 'kubernetes' if: matrix.driver == 'kubernetes'
uses: crazy-max/.github/.github/actions/install-k3s@7730d1434364d4b9aded32735b078a7ace5ea79a uses: crazy-max/.github/.github/actions/install-k3s@fa6141aedf23596fb8bdcceab9cce8dadaa31bd9
with: with:
version: ${{ env.K3S_VERSION }} version: ${{ env.K3S_VERSION }}
- -
@@ -215,9 +215,6 @@ jobs:
- -
name: Checkout name: Checkout
uses: actions/checkout@v4 uses: actions/checkout@v4
-
name: Expose GitHub Runtime
uses: crazy-max/ghaction-github-runtime@v3
- -
name: Environment variables name: Environment variables
if: matrix.envs != '' if: matrix.envs != ''

View File

@@ -1,6 +1,9 @@
run: run:
timeout: 30m timeout: 30m
modules-download-mode: vendor modules-download-mode: vendor
# default uses Go version from the go.mod file, fallback on the env var
# `GOVERSION`, fallback on 1.17: https://golangci-lint.run/usage/configuration/#run-configuration
go: "1.23"
linters: linters:
enable: enable:
@@ -40,9 +43,6 @@ linters-settings:
# buildkit errdefs package (or vice-versa). # buildkit errdefs package (or vice-versa).
- pkg: "github.com/containerd/errdefs" - pkg: "github.com/containerd/errdefs"
alias: "cerrdefs" alias: "cerrdefs"
# Use a consistent alias to prevent confusion with "github.com/moby/buildkit/client"
- pkg: "github.com/docker/docker/client"
alias: "dockerclient"
- pkg: "github.com/opencontainers/image-spec/specs-go/v1" - pkg: "github.com/opencontainers/image-spec/specs-go/v1"
alias: "ocispecs" alias: "ocispecs"
- pkg: "github.com/opencontainers/go-digest" - pkg: "github.com/opencontainers/go-digest"

View File

@@ -5,23 +5,20 @@ ARG ALPINE_VERSION=3.21
ARG XX_VERSION=1.6.1 ARG XX_VERSION=1.6.1
# for testing # for testing
ARG DOCKER_VERSION=28.0.0 ARG DOCKER_VERSION=27.5.0
ARG DOCKER_VERSION_ALT_27=27.5.1
ARG DOCKER_VERSION_ALT_26=26.1.3 ARG DOCKER_VERSION_ALT_26=26.1.3
ARG DOCKER_CLI_VERSION=${DOCKER_VERSION} ARG DOCKER_CLI_VERSION=${DOCKER_VERSION}
ARG GOTESTSUM_VERSION=v1.12.0 ARG GOTESTSUM_VERSION=v1.12.0
ARG REGISTRY_VERSION=2.8.3 ARG REGISTRY_VERSION=2.8.3
ARG BUILDKIT_VERSION=v0.20.2 ARG BUILDKIT_VERSION=v0.19.0-rc2
ARG UNDOCK_VERSION=0.9.0 ARG UNDOCK_VERSION=0.9.0
FROM --platform=$BUILDPLATFORM tonistiigi/xx:${XX_VERSION} AS xx FROM --platform=$BUILDPLATFORM tonistiigi/xx:${XX_VERSION} AS xx
FROM --platform=$BUILDPLATFORM golang:${GO_VERSION}-alpine${ALPINE_VERSION} AS golatest FROM --platform=$BUILDPLATFORM golang:${GO_VERSION}-alpine${ALPINE_VERSION} AS golatest
FROM moby/moby-bin:$DOCKER_VERSION AS docker-engine FROM moby/moby-bin:$DOCKER_VERSION AS docker-engine
FROM dockereng/cli-bin:$DOCKER_CLI_VERSION AS docker-cli FROM dockereng/cli-bin:$DOCKER_CLI_VERSION AS docker-cli
FROM moby/moby-bin:$DOCKER_VERSION_ALT_27 AS docker-engine-alt27 FROM moby/moby-bin:$DOCKER_VERSION_ALT_26 AS docker-engine-alt
FROM moby/moby-bin:$DOCKER_VERSION_ALT_26 AS docker-engine-alt26 FROM dockereng/cli-bin:$DOCKER_VERSION_ALT_26 AS docker-cli-alt
FROM dockereng/cli-bin:$DOCKER_VERSION_ALT_27 AS docker-cli-alt27
FROM dockereng/cli-bin:$DOCKER_VERSION_ALT_26 AS docker-cli-alt26
FROM registry:$REGISTRY_VERSION AS registry FROM registry:$REGISTRY_VERSION AS registry
FROM moby/buildkit:$BUILDKIT_VERSION AS buildkit FROM moby/buildkit:$BUILDKIT_VERSION AS buildkit
FROM crazymax/undock:$UNDOCK_VERSION AS undock FROM crazymax/undock:$UNDOCK_VERSION AS undock
@@ -105,7 +102,6 @@ COPY --link --from=buildx-build /usr/bin/docker-buildx /buildx
FROM binaries-unix AS binaries-darwin FROM binaries-unix AS binaries-darwin
FROM binaries-unix AS binaries-freebsd FROM binaries-unix AS binaries-freebsd
FROM binaries-unix AS binaries-linux FROM binaries-unix AS binaries-linux
FROM binaries-unix AS binaries-netbsd
FROM binaries-unix AS binaries-openbsd FROM binaries-unix AS binaries-openbsd
FROM scratch AS binaries-windows FROM scratch AS binaries-windows
@@ -131,15 +127,13 @@ COPY --link --from=gotestsum /out /usr/bin/
COPY --link --from=registry /bin/registry /usr/bin/ COPY --link --from=registry /bin/registry /usr/bin/
COPY --link --from=docker-engine / /usr/bin/ COPY --link --from=docker-engine / /usr/bin/
COPY --link --from=docker-cli / /usr/bin/ COPY --link --from=docker-cli / /usr/bin/
COPY --link --from=docker-engine-alt27 / /opt/docker-alt-27/ COPY --link --from=docker-engine-alt / /opt/docker-alt-26/
COPY --link --from=docker-engine-alt26 / /opt/docker-alt-26/ COPY --link --from=docker-cli-alt / /opt/docker-alt-26/
COPY --link --from=docker-cli-alt27 / /opt/docker-alt-27/
COPY --link --from=docker-cli-alt26 / /opt/docker-alt-26/
COPY --link --from=buildkit /usr/bin/buildkitd /usr/bin/ COPY --link --from=buildkit /usr/bin/buildkitd /usr/bin/
COPY --link --from=buildkit /usr/bin/buildctl /usr/bin/ COPY --link --from=buildkit /usr/bin/buildctl /usr/bin/
COPY --link --from=undock /usr/local/bin/undock /usr/bin/ COPY --link --from=undock /usr/local/bin/undock /usr/bin/
COPY --link --from=binaries /buildx /usr/bin/ COPY --link --from=binaries /buildx /usr/bin/
ENV TEST_DOCKER_EXTRA="docker@27.5=/opt/docker-alt-27,docker@26.1=/opt/docker-alt-26" ENV TEST_DOCKER_EXTRA="docker@26.1=/opt/docker-alt-26"
FROM integration-test-base AS integration-test FROM integration-test-base AS integration-test
COPY . . COPY . .

View File

@@ -21,7 +21,7 @@
- [Verify essential information](#verify-essential-information) - [Verify essential information](#verify-essential-information)
- [Classify the issue](#classify-the-issue) - [Classify the issue](#classify-the-issue)
- [Prioritization guidelines for `kind/bug`](#prioritization-guidelines-for-kindbug) - [Prioritization guidelines for `kind/bug`](#prioritization-guidelines-for-kindbug)
- [Issue lifecycle](#issue-lifecycle) - [Issue lifecyle](#issue-lifecyle)
- [Examples](#examples) - [Examples](#examples)
- [Submitting a bug](#submitting-a-bug) - [Submitting a bug](#submitting-a-bug)
- [Pull request review process](#pull-request-review-process) - [Pull request review process](#pull-request-review-process)
@@ -308,7 +308,7 @@ Examples:
- Bugs in non-default configurations - Bugs in non-default configurations
- Most enhancements - Most enhancements
## Issue lifecycle ## Issue lifecyle
```mermaid ```mermaid
flowchart LR flowchart LR

View File

@@ -3,9 +3,7 @@ package bake
import ( import (
"context" "context"
"encoding" "encoding"
"encoding/json"
"io" "io"
"maps"
"os" "os"
"path" "path"
"path/filepath" "path/filepath"
@@ -29,6 +27,7 @@ import (
"github.com/moby/buildkit/client" "github.com/moby/buildkit/client"
"github.com/moby/buildkit/client/llb" "github.com/moby/buildkit/client/llb"
"github.com/moby/buildkit/session/auth/authprovider" "github.com/moby/buildkit/session/auth/authprovider"
"github.com/moby/buildkit/util/entitlements"
"github.com/pkg/errors" "github.com/pkg/errors"
"github.com/zclconf/go-cty/cty" "github.com/zclconf/go-cty/cty"
"github.com/zclconf/go-cty/cty/convert" "github.com/zclconf/go-cty/cty/convert"
@@ -47,7 +46,6 @@ type File struct {
type Override struct { type Override struct {
Value string Value string
ArrValue []string ArrValue []string
Append bool
} }
func defaultFilenames() []string { func defaultFilenames() []string {
@@ -488,8 +486,10 @@ func (c Config) loadLinks(name string, t *Target, m map[string]*Target, o map[st
if target == name { if target == name {
return errors.Errorf("target %s cannot link to itself", target) return errors.Errorf("target %s cannot link to itself", target)
} }
if slices.Contains(visited, target) { for _, v := range visited {
return errors.Errorf("infinite loop from %s to %s", name, target) if v == target {
return errors.Errorf("infinite loop from %s to %s", name, target)
}
} }
t2, ok := m[target] t2, ok := m[target]
if !ok { if !ok {
@@ -529,12 +529,9 @@ func (c Config) newOverrides(v []string) (map[string]map[string]Override, error)
m := map[string]map[string]Override{} m := map[string]map[string]Override{}
for _, v := range v { for _, v := range v {
parts := strings.SplitN(v, "=", 2) parts := strings.SplitN(v, "=", 2)
keys := strings.SplitN(parts[0], ".", 3)
skey := strings.TrimSuffix(parts[0], "+")
appendTo := strings.HasSuffix(parts[0], "+")
keys := strings.SplitN(skey, ".", 3)
if len(keys) < 2 { if len(keys) < 2 {
return nil, errors.Errorf("invalid override key %s, expected target.name", skey) return nil, errors.Errorf("invalid override key %s, expected target.name", parts[0])
} }
pattern := keys[0] pattern := keys[0]
@@ -547,7 +544,8 @@ func (c Config) newOverrides(v []string) (map[string]map[string]Override, error)
return nil, err return nil, err
} }
okey := strings.Join(keys[1:], ".") kk := strings.SplitN(parts[0], ".", 2)
for _, name := range names { for _, name := range names {
t, ok := m[name] t, ok := m[name]
if !ok { if !ok {
@@ -555,15 +553,14 @@ func (c Config) newOverrides(v []string) (map[string]map[string]Override, error)
m[name] = t m[name] = t
} }
override := t[okey] o := t[kk[1]]
// IMPORTANT: if you add more fields here, do not forget to update // IMPORTANT: if you add more fields here, do not forget to update
// docs/reference/buildx_bake.md (--set) and https://docs.docker.com/build/bake/overrides/ // docs/bake-reference.md and https://docs.docker.com/build/bake/overrides/
switch keys[1] { switch keys[1] {
case "output", "cache-to", "cache-from", "tags", "platform", "secrets", "ssh", "attest", "entitlements", "network", "annotations": case "output", "cache-to", "cache-from", "tags", "platform", "secrets", "ssh", "attest", "entitlements", "network":
if len(parts) == 2 { if len(parts) == 2 {
override.Append = appendTo o.ArrValue = append(o.ArrValue, parts[1])
override.ArrValue = append(override.ArrValue, parts[1])
} }
case "args": case "args":
if len(keys) != 3 { if len(keys) != 3 {
@@ -574,7 +571,7 @@ func (c Config) newOverrides(v []string) (map[string]map[string]Override, error)
if !ok { if !ok {
continue continue
} }
override.Value = v o.Value = v
} }
fallthrough fallthrough
case "contexts": case "contexts":
@@ -584,11 +581,11 @@ func (c Config) newOverrides(v []string) (map[string]map[string]Override, error)
fallthrough fallthrough
default: default:
if len(parts) == 2 { if len(parts) == 2 {
override.Value = parts[1] o.Value = parts[1]
} }
} }
t[okey] = override t[kk[1]] = o
} }
} }
return m, nil return m, nil
@@ -734,41 +731,6 @@ type Target struct {
linked bool linked bool
} }
func (t *Target) MarshalJSON() ([]byte, error) {
tgt := *t
esc := func(s string) string {
return strings.ReplaceAll(strings.ReplaceAll(s, "${", "$${"), "%{", "%%{")
}
tgt.Annotations = slices.Clone(t.Annotations)
for i, v := range tgt.Annotations {
tgt.Annotations[i] = esc(v)
}
if tgt.DockerfileInline != nil {
escaped := esc(*tgt.DockerfileInline)
tgt.DockerfileInline = &escaped
}
tgt.Labels = maps.Clone(t.Labels)
for k, v := range t.Labels {
if v != nil {
escaped := esc(*v)
tgt.Labels[k] = &escaped
}
}
tgt.Args = maps.Clone(t.Args)
for k, v := range t.Args {
if v != nil {
escaped := esc(*v)
tgt.Args[k] = &escaped
}
}
return json.Marshal(tgt)
}
var ( var (
_ hclparser.WithEvalContexts = &Target{} _ hclparser.WithEvalContexts = &Target{}
_ hclparser.WithGetName = &Target{} _ hclparser.WithGetName = &Target{}
@@ -935,21 +897,13 @@ func (t *Target) AddOverrides(overrides map[string]Override, ent *EntitlementCon
} }
t.Labels[keys[1]] = &value t.Labels[keys[1]] = &value
case "tags": case "tags":
if o.Append { t.Tags = o.ArrValue
t.Tags = append(t.Tags, o.ArrValue...)
} else {
t.Tags = o.ArrValue
}
case "cache-from": case "cache-from":
cacheFrom, err := buildflags.ParseCacheEntry(o.ArrValue) cacheFrom, err := buildflags.ParseCacheEntry(o.ArrValue)
if err != nil { if err != nil {
return err return err
} }
if o.Append { t.CacheFrom = cacheFrom
t.CacheFrom = t.CacheFrom.Merge(cacheFrom)
} else {
t.CacheFrom = cacheFrom
}
for _, c := range t.CacheFrom { for _, c := range t.CacheFrom {
if c.Type == "local" { if c.Type == "local" {
if v, ok := c.Attrs["src"]; ok { if v, ok := c.Attrs["src"]; ok {
@@ -962,11 +916,7 @@ func (t *Target) AddOverrides(overrides map[string]Override, ent *EntitlementCon
if err != nil { if err != nil {
return err return err
} }
if o.Append { t.CacheTo = cacheTo
t.CacheTo = t.CacheTo.Merge(cacheTo)
} else {
t.CacheTo = cacheTo
}
for _, c := range t.CacheTo { for _, c := range t.CacheTo {
if c.Type == "local" { if c.Type == "local" {
if v, ok := c.Attrs["dest"]; ok { if v, ok := c.Attrs["dest"]; ok {
@@ -983,11 +933,7 @@ func (t *Target) AddOverrides(overrides map[string]Override, ent *EntitlementCon
if err != nil { if err != nil {
return errors.Wrap(err, "invalid value for outputs") return errors.Wrap(err, "invalid value for outputs")
} }
if o.Append { t.Secrets = secrets
t.Secrets = t.Secrets.Merge(secrets)
} else {
t.Secrets = secrets
}
for _, s := range t.Secrets { for _, s := range t.Secrets {
if s.FilePath != "" { if s.FilePath != "" {
ent.FSRead = append(ent.FSRead, s.FilePath) ent.FSRead = append(ent.FSRead, s.FilePath)
@@ -998,30 +944,18 @@ func (t *Target) AddOverrides(overrides map[string]Override, ent *EntitlementCon
if err != nil { if err != nil {
return errors.Wrap(err, "invalid value for outputs") return errors.Wrap(err, "invalid value for outputs")
} }
if o.Append { t.SSH = ssh
t.SSH = t.SSH.Merge(ssh)
} else {
t.SSH = ssh
}
for _, s := range t.SSH { for _, s := range t.SSH {
ent.FSRead = append(ent.FSRead, s.Paths...) ent.FSRead = append(ent.FSRead, s.Paths...)
} }
case "platform": case "platform":
if o.Append { t.Platforms = o.ArrValue
t.Platforms = append(t.Platforms, o.ArrValue...)
} else {
t.Platforms = o.ArrValue
}
case "output": case "output":
outputs, err := parseArrValue[buildflags.ExportEntry](o.ArrValue) outputs, err := parseArrValue[buildflags.ExportEntry](o.ArrValue)
if err != nil { if err != nil {
return errors.Wrap(err, "invalid value for outputs") return errors.Wrap(err, "invalid value for outputs")
} }
if o.Append { t.Outputs = outputs
t.Outputs = t.Outputs.Merge(outputs)
} else {
t.Outputs = outputs
}
for _, o := range t.Outputs { for _, o := range t.Outputs {
if o.Destination != "" { if o.Destination != "" {
ent.FSWrite = append(ent.FSWrite, o.Destination) ent.FSWrite = append(ent.FSWrite, o.Destination)
@@ -1051,19 +985,11 @@ func (t *Target) AddOverrides(overrides map[string]Override, ent *EntitlementCon
} }
t.NoCache = &noCache t.NoCache = &noCache
case "no-cache-filter": case "no-cache-filter":
if o.Append { t.NoCacheFilter = o.ArrValue
t.NoCacheFilter = append(t.NoCacheFilter, o.ArrValue...)
} else {
t.NoCacheFilter = o.ArrValue
}
case "shm-size": case "shm-size":
t.ShmSize = &value t.ShmSize = &value
case "ulimits": case "ulimits":
if o.Append { t.Ulimits = o.ArrValue
t.Ulimits = append(t.Ulimits, o.ArrValue...)
} else {
t.Ulimits = o.ArrValue
}
case "network": case "network":
t.NetworkMode = &value t.NetworkMode = &value
case "pull": case "pull":
@@ -1141,7 +1067,9 @@ func (t *Target) GetEvalContexts(ectx *hcl.EvalContext, block *hcl.Block, loadDe
e2 := ectx.NewChild() e2 := ectx.NewChild()
e2.Variables = make(map[string]cty.Value) e2.Variables = make(map[string]cty.Value)
if e != ectx { if e != ectx {
maps.Copy(e2.Variables, e.Variables) for k, v := range e.Variables {
e2.Variables[k] = v
}
} }
e2.Variables[k] = v e2.Variables[k] = v
ectxs2 = append(ectxs2, e2) ectxs2 = append(ectxs2, e2)
@@ -1202,9 +1130,7 @@ func (t *Target) GetName(ectx *hcl.EvalContext, block *hcl.Block, loadDeps func(
func TargetsToBuildOpt(m map[string]*Target, inp *Input) (map[string]build.Options, error) { func TargetsToBuildOpt(m map[string]*Target, inp *Input) (map[string]build.Options, error) {
// make sure local credentials are loaded multiple times for different targets // make sure local credentials are loaded multiple times for different targets
dockerConfig := config.LoadDefaultConfigFile(os.Stderr) dockerConfig := config.LoadDefaultConfigFile(os.Stderr)
authProvider := authprovider.NewDockerAuthProvider(authprovider.DockerAuthProviderConfig{ authProvider := authprovider.NewDockerAuthProvider(dockerConfig, nil)
ConfigFile: dockerConfig,
})
m2 := make(map[string]build.Options, len(m)) m2 := make(map[string]build.Options, len(m))
for k, v := range m { for k, v := range m {
@@ -1506,7 +1432,9 @@ func toBuildOpt(t *Target, inp *Input) (*build.Options, error) {
} }
bo.Ulimits = ulimits bo.Ulimits = ulimits
bo.Allow = append(bo.Allow, t.Entitlements...) for _, ent := range t.Entitlements {
bo.Allow = append(bo.Allow, entitlements.Entitlement(ent))
}
return bo, nil return bo, nil
} }

View File

@@ -34,18 +34,6 @@ target "webapp" {
args = { args = {
VAR_BOTH = "webapp" VAR_BOTH = "webapp"
} }
annotations = [
"index,manifest:org.opencontainers.image.authors=dvdksn"
]
attest = [
"type=provenance,mode=max"
]
platforms = [
"linux/amd64"
]
secret = [
"id=FOO,env=FOO"
]
inherits = ["webDEP"] inherits = ["webDEP"]
}`), }`),
} }
@@ -127,31 +115,6 @@ target "webapp" {
}) })
}) })
t.Run("AnnotationsOverrides", func(t *testing.T) {
t.Parallel()
m, g, err := ReadTargets(ctx, []File{fp}, []string{"webapp"}, []string{"webapp.annotations=index,manifest:org.opencontainers.image.vendor=docker"}, nil, &EntitlementConf{})
require.NoError(t, err)
require.Equal(t, []string{"index,manifest:org.opencontainers.image.authors=dvdksn", "index,manifest:org.opencontainers.image.vendor=docker"}, m["webapp"].Annotations)
require.Equal(t, 1, len(g))
require.Equal(t, []string{"webapp"}, g["default"].Targets)
})
t.Run("AttestOverride", func(t *testing.T) {
m, _, err := ReadTargets(ctx, []File{fp}, []string{"webapp"}, []string{"webapp.attest=type=sbom"}, nil, &EntitlementConf{})
require.NoError(t, err)
require.Len(t, m["webapp"].Attest, 2)
require.Equal(t, "provenance", m["webapp"].Attest[0].Type)
require.Equal(t, "sbom", m["webapp"].Attest[1].Type)
})
t.Run("AttestAppend", func(t *testing.T) {
m, _, err := ReadTargets(ctx, []File{fp}, []string{"webapp"}, []string{"webapp.attest+=type=sbom"}, nil, &EntitlementConf{})
require.NoError(t, err)
require.Len(t, m["webapp"].Attest, 2)
require.Equal(t, "provenance", m["webapp"].Attest[0].Type)
require.Equal(t, "sbom", m["webapp"].Attest[1].Type)
})
t.Run("ContextOverride", func(t *testing.T) { t.Run("ContextOverride", func(t *testing.T) {
t.Parallel() t.Parallel()
_, _, err := ReadTargets(ctx, []File{fp}, []string{"webapp"}, []string{"webapp.context"}, nil, &EntitlementConf{}) _, _, err := ReadTargets(ctx, []File{fp}, []string{"webapp"}, []string{"webapp.context"}, nil, &EntitlementConf{})
@@ -173,49 +136,6 @@ target "webapp" {
require.Equal(t, []string{"webapp"}, g["default"].Targets) require.Equal(t, []string{"webapp"}, g["default"].Targets)
}) })
t.Run("PlatformOverride", func(t *testing.T) {
m, _, err := ReadTargets(ctx, []File{fp}, []string{"webapp"}, []string{"webapp.platform=linux/arm64"}, nil, &EntitlementConf{})
require.NoError(t, err)
require.Equal(t, []string{"linux/arm64"}, m["webapp"].Platforms)
})
t.Run("PlatformAppend", func(t *testing.T) {
m, _, err := ReadTargets(ctx, []File{fp}, []string{"webapp"}, []string{"webapp.platform+=linux/arm64"}, nil, &EntitlementConf{})
require.NoError(t, err)
require.Equal(t, []string{"linux/amd64", "linux/arm64"}, m["webapp"].Platforms)
})
t.Run("PlatformAppendMulti", func(t *testing.T) {
m, _, err := ReadTargets(ctx, []File{fp}, []string{"webapp"}, []string{"webapp.platform+=linux/arm64", "webapp.platform+=linux/riscv64"}, nil, &EntitlementConf{})
require.NoError(t, err)
require.Equal(t, []string{"linux/amd64", "linux/arm64", "linux/riscv64"}, m["webapp"].Platforms)
})
t.Run("PlatformAppendMultiLastOverride", func(t *testing.T) {
m, _, err := ReadTargets(ctx, []File{fp}, []string{"webapp"}, []string{"webapp.platform+=linux/arm64", "webapp.platform=linux/riscv64"}, nil, &EntitlementConf{})
require.NoError(t, err)
require.Equal(t, []string{"linux/arm64", "linux/riscv64"}, m["webapp"].Platforms)
})
t.Run("SecretsOverride", func(t *testing.T) {
t.Setenv("FOO", "foo")
t.Setenv("BAR", "bar")
m, _, err := ReadTargets(ctx, []File{fp}, []string{"webapp"}, []string{"webapp.secrets=id=BAR,env=BAR"}, nil, &EntitlementConf{})
require.NoError(t, err)
require.Len(t, m["webapp"].Secrets, 1)
require.Equal(t, "BAR", m["webapp"].Secrets[0].ID)
})
t.Run("SecretsAppend", func(t *testing.T) {
t.Setenv("FOO", "foo")
t.Setenv("BAR", "bar")
m, _, err := ReadTargets(ctx, []File{fp}, []string{"webapp"}, []string{"webapp.secrets+=id=BAR,env=BAR"}, nil, &EntitlementConf{})
require.NoError(t, err)
require.Len(t, m["webapp"].Secrets, 2)
require.Equal(t, "FOO", m["webapp"].Secrets[0].ID)
require.Equal(t, "BAR", m["webapp"].Secrets[1].ID)
})
t.Run("ShmSizeOverride", func(t *testing.T) { t.Run("ShmSizeOverride", func(t *testing.T) {
m, _, err := ReadTargets(ctx, []File{fp}, []string{"webapp"}, []string{"webapp.shm-size=256m"}, nil, &EntitlementConf{}) m, _, err := ReadTargets(ctx, []File{fp}, []string{"webapp"}, []string{"webapp.shm-size=256m"}, nil, &EntitlementConf{})
require.NoError(t, err) require.NoError(t, err)
@@ -1886,8 +1806,8 @@ func TestHCLEntitlements(t *testing.T) {
require.Equal(t, "network.host", m["app"].Entitlements[1]) require.Equal(t, "network.host", m["app"].Entitlements[1])
require.Len(t, bo["app"].Allow, 2) require.Len(t, bo["app"].Allow, 2)
require.Equal(t, entitlements.EntitlementSecurityInsecure.String(), bo["app"].Allow[0]) require.Equal(t, entitlements.EntitlementSecurityInsecure, bo["app"].Allow[0])
require.Equal(t, entitlements.EntitlementNetworkHost.String(), bo["app"].Allow[1]) require.Equal(t, entitlements.EntitlementNetworkHost, bo["app"].Allow[1])
} }
func TestEntitlementsForNetHostCompose(t *testing.T) { func TestEntitlementsForNetHostCompose(t *testing.T) {
@@ -1926,7 +1846,7 @@ func TestEntitlementsForNetHostCompose(t *testing.T) {
require.Equal(t, "host", *m["app"].NetworkMode) require.Equal(t, "host", *m["app"].NetworkMode)
require.Len(t, bo["app"].Allow, 1) require.Len(t, bo["app"].Allow, 1)
require.Equal(t, entitlements.EntitlementNetworkHost.String(), bo["app"].Allow[0]) require.Equal(t, entitlements.EntitlementNetworkHost, bo["app"].Allow[0])
require.Equal(t, "host", bo["app"].NetworkMode) require.Equal(t, "host", bo["app"].NetworkMode)
} }
@@ -1957,7 +1877,7 @@ func TestEntitlementsForNetHost(t *testing.T) {
require.Equal(t, "host", *m["app"].NetworkMode) require.Equal(t, "host", *m["app"].NetworkMode)
require.Len(t, bo["app"].Allow, 1) require.Len(t, bo["app"].Allow, 1)
require.Equal(t, entitlements.EntitlementNetworkHost.String(), bo["app"].Allow[0]) require.Equal(t, entitlements.EntitlementNetworkHost, bo["app"].Allow[0])
require.Equal(t, "host", bo["app"].NetworkMode) require.Equal(t, "host", bo["app"].NetworkMode)
} }
@@ -2142,73 +2062,6 @@ target "app" {
}) })
} }
func TestVariableValidationConditionNull(t *testing.T) {
fp := File{
Name: "docker-bake.hcl",
Data: []byte(`
variable "PORT" {
default = 3000
validation {}
}
target "app" {
args = {
PORT = PORT
}
}
`),
}
_, _, err := ReadTargets(context.TODO(), []File{fp}, []string{"app"}, nil, nil, &EntitlementConf{})
require.Error(t, err)
require.Contains(t, err.Error(), "Condition expression must return either true or false, not null")
}
func TestVariableValidationConditionUnknownValue(t *testing.T) {
fp := File{
Name: "docker-bake.hcl",
Data: []byte(`
variable "PORT" {
default = 3000
validation {
condition = "foo"
}
}
target "app" {
args = {
PORT = PORT
}
}
`),
}
_, _, err := ReadTargets(context.TODO(), []File{fp}, []string{"app"}, nil, nil, &EntitlementConf{})
require.Error(t, err)
require.Contains(t, err.Error(), "Invalid condition result value: a bool is required")
}
func TestVariableValidationInvalidErrorMessage(t *testing.T) {
fp := File{
Name: "docker-bake.hcl",
Data: []byte(`
variable "FOO" {
default = 0
validation {
condition = FOO > 5
}
}
target "app" {
args = {
FOO = FOO
}
}
`),
}
_, _, err := ReadTargets(context.TODO(), []File{fp}, []string{"app"}, nil, nil, &EntitlementConf{})
require.Error(t, err)
require.Contains(t, err.Error(), "This check failed, but has an invalid error message")
}
// https://github.com/docker/buildx/issues/2822 // https://github.com/docker/buildx/issues/2822
func TestVariableEmpty(t *testing.T) { func TestVariableEmpty(t *testing.T) {
fp := File{ fp := File{

View File

@@ -92,9 +92,6 @@ func ParseCompose(cfgs []composetypes.ConfigFile, envs map[string]string) (*Conf
if s.Build.AdditionalContexts != nil { if s.Build.AdditionalContexts != nil {
additionalContexts = map[string]string{} additionalContexts = map[string]string{}
for k, v := range s.Build.AdditionalContexts { for k, v := range s.Build.AdditionalContexts {
if strings.HasPrefix(v, "service:") {
v = strings.Replace(v, "service:", "target:", 1)
}
additionalContexts[k] = v additionalContexts[k] = v
} }
} }
@@ -177,7 +174,6 @@ func ParseCompose(cfgs []composetypes.ConfigFile, envs map[string]string) (*Conf
CacheFrom: cacheFrom, CacheFrom: cacheFrom,
CacheTo: cacheTo, CacheTo: cacheTo,
NetworkMode: networkModeP, NetworkMode: networkModeP,
Platforms: s.Build.Platforms,
SSH: ssh, SSH: ssh,
Secrets: secrets, Secrets: secrets,
ShmSize: shmSize, ShmSize: shmSize,
@@ -218,7 +214,7 @@ func validateComposeFile(dt []byte, fn string) (bool, error) {
} }
func validateCompose(dt []byte, envs map[string]string) error { func validateCompose(dt []byte, envs map[string]string) error {
_, err := loader.LoadWithContext(context.Background(), composetypes.ConfigDetails{ _, err := loader.Load(composetypes.ConfigDetails{
ConfigFiles: []composetypes.ConfigFile{ ConfigFiles: []composetypes.ConfigFile{
{ {
Content: dt, Content: dt,
@@ -319,7 +315,7 @@ type (
stringArray []string stringArray []string
) )
func (sa *stringArray) UnmarshalYAML(unmarshal func(any) error) error { func (sa *stringArray) UnmarshalYAML(unmarshal func(interface{}) error) error {
var multi []string var multi []string
err := unmarshal(&multi) err := unmarshal(&multi)
if err != nil { if err != nil {
@@ -336,7 +332,7 @@ func (sa *stringArray) UnmarshalYAML(unmarshal func(any) error) error {
// composeExtTarget converts Compose build extension x-bake to bake Target // composeExtTarget converts Compose build extension x-bake to bake Target
// https://github.com/compose-spec/compose-spec/blob/master/spec.md#extension // https://github.com/compose-spec/compose-spec/blob/master/spec.md#extension
func (t *Target) composeExtTarget(exts map[string]any) error { func (t *Target) composeExtTarget(exts map[string]interface{}) error {
var xb xbake var xb xbake
ext, ok := exts["x-bake"] ext, ok := exts["x-bake"]

View File

@@ -463,21 +463,6 @@ services:
require.NoError(t, err) require.NoError(t, err)
} }
func TestPlatforms(t *testing.T) {
dt := []byte(`
services:
foo:
build:
context: .
platforms:
- linux/amd64
- linux/arm64
`)
c, err := ParseCompose([]composetypes.ConfigFile{{Content: dt}}, nil)
require.NoError(t, err)
require.Equal(t, []string{"linux/amd64", "linux/arm64"}, c.Targets[0].Platforms)
}
func newBool(val bool) *bool { func newBool(val bool) *bool {
b := val b := val
return &b return &b
@@ -813,37 +798,6 @@ services:
}) })
} }
func TestServiceContext(t *testing.T) {
dt := []byte(`
services:
base:
build:
dockerfile: baseapp.Dockerfile
command: ./entrypoint.sh
webapp:
build:
context: ./dir
additional_contexts:
base: service:base
`)
c, err := ParseCompose([]composetypes.ConfigFile{{Content: dt}}, nil)
require.NoError(t, err)
require.Equal(t, 1, len(c.Groups))
require.Equal(t, "default", c.Groups[0].Name)
sort.Strings(c.Groups[0].Targets)
require.Equal(t, []string{"base", "webapp"}, c.Groups[0].Targets)
require.Equal(t, 2, len(c.Targets))
sort.Slice(c.Targets, func(i, j int) bool {
return c.Targets[i].Name < c.Targets[j].Name
})
require.Equal(t, "webapp", c.Targets[1].Name)
require.Equal(t, map[string]string{"base": "target:base"}, c.Targets[1].Contexts)
}
// chdir changes the current working directory to the named directory, // chdir changes the current working directory to the named directory,
// and then restore the original working directory at the end of the test. // and then restore the original working directory at the end of the test.
func chdir(t *testing.T, dir string) { func chdir(t *testing.T, dir string) {

View File

@@ -20,7 +20,6 @@ import (
"github.com/moby/buildkit/util/entitlements" "github.com/moby/buildkit/util/entitlements"
"github.com/pkg/errors" "github.com/pkg/errors"
"github.com/sirupsen/logrus" "github.com/sirupsen/logrus"
"github.com/tonistiigi/go-csvvalue"
) )
type EntitlementKey string type EntitlementKey string
@@ -28,7 +27,6 @@ type EntitlementKey string
const ( const (
EntitlementKeyNetworkHost EntitlementKey = "network.host" EntitlementKeyNetworkHost EntitlementKey = "network.host"
EntitlementKeySecurityInsecure EntitlementKey = "security.insecure" EntitlementKeySecurityInsecure EntitlementKey = "security.insecure"
EntitlementKeyDevice EntitlementKey = "device"
EntitlementKeyFSRead EntitlementKey = "fs.read" EntitlementKeyFSRead EntitlementKey = "fs.read"
EntitlementKeyFSWrite EntitlementKey = "fs.write" EntitlementKeyFSWrite EntitlementKey = "fs.write"
EntitlementKeyFS EntitlementKey = "fs" EntitlementKeyFS EntitlementKey = "fs"
@@ -41,7 +39,6 @@ const (
type EntitlementConf struct { type EntitlementConf struct {
NetworkHost bool NetworkHost bool
SecurityInsecure bool SecurityInsecure bool
Devices *EntitlementsDevicesConf
FSRead []string FSRead []string
FSWrite []string FSWrite []string
ImagePush []string ImagePush []string
@@ -49,11 +46,6 @@ type EntitlementConf struct {
SSH bool SSH bool
} }
type EntitlementsDevicesConf struct {
All bool
Devices map[string]struct{}
}
func ParseEntitlements(in []string) (EntitlementConf, error) { func ParseEntitlements(in []string) (EntitlementConf, error) {
var conf EntitlementConf var conf EntitlementConf
for _, e := range in { for _, e := range in {
@@ -67,22 +59,6 @@ func ParseEntitlements(in []string) (EntitlementConf, error) {
default: default:
k, v, _ := strings.Cut(e, "=") k, v, _ := strings.Cut(e, "=")
switch k { switch k {
case string(EntitlementKeyDevice):
if v == "" {
conf.Devices = &EntitlementsDevicesConf{All: true}
continue
}
fields, err := csvvalue.Fields(v, nil)
if err != nil {
return EntitlementConf{}, errors.Wrapf(err, "failed to parse device entitlement %q", v)
}
if conf.Devices == nil {
conf.Devices = &EntitlementsDevicesConf{}
}
if conf.Devices.Devices == nil {
conf.Devices.Devices = make(map[string]struct{}, 0)
}
conf.Devices.Devices[fields[0]] = struct{}{}
case string(EntitlementKeyFSRead): case string(EntitlementKeyFSRead):
conf.FSRead = append(conf.FSRead, v) conf.FSRead = append(conf.FSRead, v)
case string(EntitlementKeyFSWrite): case string(EntitlementKeyFSWrite):
@@ -119,34 +95,12 @@ func (c EntitlementConf) Validate(m map[string]build.Options) (EntitlementConf,
func (c EntitlementConf) check(bo build.Options, expected *EntitlementConf) error { func (c EntitlementConf) check(bo build.Options, expected *EntitlementConf) error {
for _, e := range bo.Allow { for _, e := range bo.Allow {
k, rest, _ := strings.Cut(e, "=")
switch k {
case entitlements.EntitlementDevice.String():
if rest == "" {
if c.Devices == nil || !c.Devices.All {
expected.Devices = &EntitlementsDevicesConf{All: true}
}
continue
}
fields, err := csvvalue.Fields(rest, nil)
if err != nil {
return errors.Wrapf(err, "failed to parse device entitlement %q", rest)
}
if expected.Devices == nil {
expected.Devices = &EntitlementsDevicesConf{}
}
if expected.Devices.Devices == nil {
expected.Devices.Devices = make(map[string]struct{}, 0)
}
expected.Devices.Devices[fields[0]] = struct{}{}
}
switch e { switch e {
case entitlements.EntitlementNetworkHost.String(): case entitlements.EntitlementNetworkHost:
if !c.NetworkHost { if !c.NetworkHost {
expected.NetworkHost = true expected.NetworkHost = true
} }
case entitlements.EntitlementSecurityInsecure.String(): case entitlements.EntitlementSecurityInsecure:
if !c.SecurityInsecure { if !c.SecurityInsecure {
expected.SecurityInsecure = true expected.SecurityInsecure = true
} }
@@ -233,18 +187,6 @@ func (c EntitlementConf) Prompt(ctx context.Context, isRemote bool, out io.Write
flags = append(flags, string(EntitlementKeySecurityInsecure)) flags = append(flags, string(EntitlementKeySecurityInsecure))
} }
if c.Devices != nil {
if c.Devices.All {
msgs = append(msgs, " - Access to CDI devices")
flags = append(flags, string(EntitlementKeyDevice))
} else {
for d := range c.Devices.Devices {
msgs = append(msgs, fmt.Sprintf(" - Access to device %s", d))
flags = append(flags, string(EntitlementKeyDevice)+"="+d)
}
}
}
if c.SSH { if c.SSH {
msgsFS = append(msgsFS, " - Forwarding default SSH agent socket") msgsFS = append(msgsFS, " - Forwarding default SSH agent socket")
flagsFS = append(flagsFS, string(EntitlementKeySSH)) flagsFS = append(flagsFS, string(EntitlementKeySSH))
@@ -306,7 +248,7 @@ func (c EntitlementConf) Prompt(ctx context.Context, isRemote bool, out io.Write
fmt.Fprintf(out, "\nPass %q to grant requested privileges.\n", strings.Join(slices.Concat(flags, flagsFS), " ")) fmt.Fprintf(out, "\nPass %q to grant requested privileges.\n", strings.Join(slices.Concat(flags, flagsFS), " "))
} }
args := slices.Clone(os.Args) args := append([]string(nil), os.Args...)
if v, ok := os.LookupEnv("DOCKER_CLI_PLUGIN_ORIGINAL_CLI_COMMAND"); ok && v != "" { if v, ok := os.LookupEnv("DOCKER_CLI_PLUGIN_ORIGINAL_CLI_COMMAND"); ok && v != "" {
args[0] = v args[0] = v
} }

View File

@@ -208,8 +208,8 @@ func TestValidateEntitlements(t *testing.T) {
{ {
name: "NetworkHostMissing", name: "NetworkHostMissing",
opt: build.Options{ opt: build.Options{
Allow: []string{ Allow: []entitlements.Entitlement{
entitlements.EntitlementNetworkHost.String(), entitlements.EntitlementNetworkHost,
}, },
}, },
expected: EntitlementConf{ expected: EntitlementConf{
@@ -223,8 +223,8 @@ func TestValidateEntitlements(t *testing.T) {
NetworkHost: true, NetworkHost: true,
}, },
opt: build.Options{ opt: build.Options{
Allow: []string{ Allow: []entitlements.Entitlement{
entitlements.EntitlementNetworkHost.String(), entitlements.EntitlementNetworkHost,
}, },
}, },
expected: EntitlementConf{ expected: EntitlementConf{
@@ -234,9 +234,9 @@ func TestValidateEntitlements(t *testing.T) {
{ {
name: "SecurityAndNetworkHostMissing", name: "SecurityAndNetworkHostMissing",
opt: build.Options{ opt: build.Options{
Allow: []string{ Allow: []entitlements.Entitlement{
entitlements.EntitlementNetworkHost.String(), entitlements.EntitlementNetworkHost,
entitlements.EntitlementSecurityInsecure.String(), entitlements.EntitlementSecurityInsecure,
}, },
}, },
expected: EntitlementConf{ expected: EntitlementConf{
@@ -251,9 +251,9 @@ func TestValidateEntitlements(t *testing.T) {
NetworkHost: true, NetworkHost: true,
}, },
opt: build.Options{ opt: build.Options{
Allow: []string{ Allow: []entitlements.Entitlement{
entitlements.EntitlementNetworkHost.String(), entitlements.EntitlementNetworkHost,
entitlements.EntitlementSecurityInsecure.String(), entitlements.EntitlementSecurityInsecure,
}, },
}, },
expected: EntitlementConf{ expected: EntitlementConf{

View File

@@ -2,10 +2,8 @@ package bake
import ( import (
"reflect" "reflect"
"regexp"
"testing" "testing"
hcl "github.com/hashicorp/hcl/v2"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
) )
@@ -608,7 +606,7 @@ func TestHCLAttrsCapsuleType(t *testing.T) {
target "app" { target "app" {
attest = [ attest = [
{ type = "provenance", mode = "max" }, { type = "provenance", mode = "max" },
"type=sbom,disabled=true,generator=foo,\"ENV1=bar,baz\",ENV2=hello", "type=sbom,disabled=true",
] ]
cache-from = [ cache-from = [
@@ -641,7 +639,7 @@ func TestHCLAttrsCapsuleType(t *testing.T) {
require.NoError(t, err) require.NoError(t, err)
require.Equal(t, 1, len(c.Targets)) require.Equal(t, 1, len(c.Targets))
require.Equal(t, []string{"type=provenance,mode=max", "type=sbom,disabled=true,\"ENV1=bar,baz\",ENV2=hello,generator=foo"}, stringify(c.Targets[0].Attest)) require.Equal(t, []string{"type=provenance,mode=max", "type=sbom,disabled=true"}, stringify(c.Targets[0].Attest))
require.Equal(t, []string{"type=local,dest=../out", "type=oci,dest=../out.tar"}, stringify(c.Targets[0].Outputs)) require.Equal(t, []string{"type=local,dest=../out", "type=oci,dest=../out.tar"}, stringify(c.Targets[0].Outputs))
require.Equal(t, []string{"type=local,src=path/to/cache", "user/app:cache"}, stringify(c.Targets[0].CacheFrom)) require.Equal(t, []string{"type=local,src=path/to/cache", "user/app:cache"}, stringify(c.Targets[0].CacheFrom))
require.Equal(t, []string{"type=local,dest=path/to/cache"}, stringify(c.Targets[0].CacheTo)) require.Equal(t, []string{"type=local,dest=path/to/cache"}, stringify(c.Targets[0].CacheTo))
@@ -649,7 +647,7 @@ func TestHCLAttrsCapsuleType(t *testing.T) {
require.Equal(t, []string{"default", "key=path/to/key"}, stringify(c.Targets[0].SSH)) require.Equal(t, []string{"default", "key=path/to/key"}, stringify(c.Targets[0].SSH))
} }
func TestHCLAttrsCapsuleType_ObjectVars(t *testing.T) { func TestHCLAttrsCapsuleTypeVars(t *testing.T) {
dt := []byte(` dt := []byte(`
variable "foo" { variable "foo" {
default = "bar" default = "bar"
@@ -718,52 +716,6 @@ func TestHCLAttrsCapsuleType_ObjectVars(t *testing.T) {
require.Equal(t, []string{"id=oci,src=/local/secret"}, stringify(web.Secrets)) require.Equal(t, []string{"id=oci,src=/local/secret"}, stringify(web.Secrets))
} }
func TestHCLAttrsCapsuleType_MissingVars(t *testing.T) {
dt := []byte(`
target "app" {
attest = [
"type=sbom,disabled=${SBOM}",
]
cache-from = [
{ type = "registry", ref = "user/app:${FOO1}" },
"type=local,src=path/to/cache:${FOO2}",
]
cache-to = [
{ type = "local", dest = "path/to/${BAR}" },
]
output = [
{ type = "oci", dest = "../${OUTPUT}.tar" },
]
secret = [
{ id = "mysecret", src = "/local/${SECRET}" },
]
ssh = [
{ id = "key", paths = ["path/to/${SSH_KEY}"] },
]
}
`)
var diags hcl.Diagnostics
_, err := ParseFile(dt, "docker-bake.hcl")
require.ErrorAs(t, err, &diags)
re := regexp.MustCompile(`There is no variable named "([\w\d_]+)"`)
var actual []string
for _, diag := range diags {
if m := re.FindStringSubmatch(diag.Error()); m != nil {
actual = append(actual, m[1])
}
}
require.ElementsMatch(t,
[]string{"SBOM", "FOO1", "FOO2", "BAR", "OUTPUT", "SECRET", "SSH_KEY"},
actual)
}
func TestHCLMultiFileAttrs(t *testing.T) { func TestHCLMultiFileAttrs(t *testing.T) {
dt := []byte(` dt := []byte(`
variable "FOO" { variable "FOO" {
@@ -1645,7 +1597,7 @@ func TestHCLIndexOfFunc(t *testing.T) {
require.Empty(t, c.Targets[1].Tags[1]) require.Empty(t, c.Targets[1].Tags[1])
} }
func ptrstr(s any) *string { func ptrstr(s interface{}) *string {
var n *string var n *string
if reflect.ValueOf(s).Kind() == reflect.String { if reflect.ValueOf(s).Kind() == reflect.String {
ss := s.(string) ss := s.(string)

View File

@@ -15,11 +15,11 @@ import (
// DecodeOptions allows customizing sections of the decoding process. // DecodeOptions allows customizing sections of the decoding process.
type DecodeOptions struct { type DecodeOptions struct {
ImpliedType func(gv any) (cty.Type, error) ImpliedType func(gv interface{}) (cty.Type, error)
Convert func(in cty.Value, want cty.Type) (cty.Value, error) Convert func(in cty.Value, want cty.Type) (cty.Value, error)
} }
func (o DecodeOptions) DecodeBody(body hcl.Body, ctx *hcl.EvalContext, val any) hcl.Diagnostics { func (o DecodeOptions) DecodeBody(body hcl.Body, ctx *hcl.EvalContext, val interface{}) hcl.Diagnostics {
o = o.withDefaults() o = o.withDefaults()
rv := reflect.ValueOf(val) rv := reflect.ValueOf(val)
@@ -46,7 +46,7 @@ func (o DecodeOptions) DecodeBody(body hcl.Body, ctx *hcl.EvalContext, val any)
// are returned then the given value may have been partially-populated but // are returned then the given value may have been partially-populated but
// may still be accessed by a careful caller for static analysis and editor // may still be accessed by a careful caller for static analysis and editor
// integration use-cases. // integration use-cases.
func DecodeBody(body hcl.Body, ctx *hcl.EvalContext, val any) hcl.Diagnostics { func DecodeBody(body hcl.Body, ctx *hcl.EvalContext, val interface{}) hcl.Diagnostics {
return DecodeOptions{}.DecodeBody(body, ctx, val) return DecodeOptions{}.DecodeBody(body, ctx, val)
} }
@@ -282,7 +282,7 @@ func (o DecodeOptions) decodeBlockToValue(block *hcl.Block, ctx *hcl.EvalContext
return diags return diags
} }
func (o DecodeOptions) DecodeExpression(expr hcl.Expression, ctx *hcl.EvalContext, val any) hcl.Diagnostics { func (o DecodeOptions) DecodeExpression(expr hcl.Expression, ctx *hcl.EvalContext, val interface{}) hcl.Diagnostics {
o = o.withDefaults() o = o.withDefaults()
srcVal, diags := expr.Value(ctx) srcVal, diags := expr.Value(ctx)
@@ -332,7 +332,7 @@ func (o DecodeOptions) DecodeExpression(expr hcl.Expression, ctx *hcl.EvalContex
// are returned then the given value may have been partially-populated but // are returned then the given value may have been partially-populated but
// may still be accessed by a careful caller for static analysis and editor // may still be accessed by a careful caller for static analysis and editor
// integration use-cases. // integration use-cases.
func DecodeExpression(expr hcl.Expression, ctx *hcl.EvalContext, val any) hcl.Diagnostics { func DecodeExpression(expr hcl.Expression, ctx *hcl.EvalContext, val interface{}) hcl.Diagnostics {
return DecodeOptions{}.DecodeExpression(expr, ctx, val) return DecodeOptions{}.DecodeExpression(expr, ctx, val)
} }

View File

@@ -16,8 +16,8 @@ import (
) )
func TestDecodeBody(t *testing.T) { func TestDecodeBody(t *testing.T) {
deepEquals := func(other any) func(v any) bool { deepEquals := func(other interface{}) func(v interface{}) bool {
return func(v any) bool { return func(v interface{}) bool {
return reflect.DeepEqual(v, other) return reflect.DeepEqual(v, other)
} }
} }
@@ -45,19 +45,19 @@ func TestDecodeBody(t *testing.T) {
} }
tests := []struct { tests := []struct {
Body map[string]any Body map[string]interface{}
Target func() any Target func() interface{}
Check func(v any) bool Check func(v interface{}) bool
DiagCount int DiagCount int
}{ }{
{ {
map[string]any{}, map[string]interface{}{},
makeInstantiateType(struct{}{}), makeInstantiateType(struct{}{}),
deepEquals(struct{}{}), deepEquals(struct{}{}),
0, 0,
}, },
{ {
map[string]any{}, map[string]interface{}{},
makeInstantiateType(struct { makeInstantiateType(struct {
Name string `hcl:"name"` Name string `hcl:"name"`
}{}), }{}),
@@ -67,7 +67,7 @@ func TestDecodeBody(t *testing.T) {
1, // name is required 1, // name is required
}, },
{ {
map[string]any{}, map[string]interface{}{},
makeInstantiateType(struct { makeInstantiateType(struct {
Name *string `hcl:"name"` Name *string `hcl:"name"`
}{}), }{}),
@@ -77,7 +77,7 @@ func TestDecodeBody(t *testing.T) {
0, 0,
}, // name nil }, // name nil
{ {
map[string]any{}, map[string]interface{}{},
makeInstantiateType(struct { makeInstantiateType(struct {
Name string `hcl:"name,optional"` Name string `hcl:"name,optional"`
}{}), }{}),
@@ -87,9 +87,9 @@ func TestDecodeBody(t *testing.T) {
0, 0,
}, // name optional }, // name optional
{ {
map[string]any{}, map[string]interface{}{},
makeInstantiateType(withNameExpression{}), makeInstantiateType(withNameExpression{}),
func(v any) bool { func(v interface{}) bool {
if v == nil { if v == nil {
return false return false
} }
@@ -109,11 +109,11 @@ func TestDecodeBody(t *testing.T) {
0, 0,
}, },
{ {
map[string]any{ map[string]interface{}{
"name": "Ermintrude", "name": "Ermintrude",
}, },
makeInstantiateType(withNameExpression{}), makeInstantiateType(withNameExpression{}),
func(v any) bool { func(v interface{}) bool {
if v == nil { if v == nil {
return false return false
} }
@@ -133,7 +133,7 @@ func TestDecodeBody(t *testing.T) {
0, 0,
}, },
{ {
map[string]any{ map[string]interface{}{
"name": "Ermintrude", "name": "Ermintrude",
}, },
makeInstantiateType(struct { makeInstantiateType(struct {
@@ -145,7 +145,7 @@ func TestDecodeBody(t *testing.T) {
0, 0,
}, },
{ {
map[string]any{ map[string]interface{}{
"name": "Ermintrude", "name": "Ermintrude",
"age": 23, "age": 23,
}, },
@@ -158,7 +158,7 @@ func TestDecodeBody(t *testing.T) {
1, // Extraneous "age" property 1, // Extraneous "age" property
}, },
{ {
map[string]any{ map[string]interface{}{
"name": "Ermintrude", "name": "Ermintrude",
"age": 50, "age": 50,
}, },
@@ -166,7 +166,7 @@ func TestDecodeBody(t *testing.T) {
Name string `hcl:"name"` Name string `hcl:"name"`
Attrs hcl.Attributes `hcl:",remain"` Attrs hcl.Attributes `hcl:",remain"`
}{}), }{}),
func(gotI any) bool { func(gotI interface{}) bool {
got := gotI.(struct { got := gotI.(struct {
Name string `hcl:"name"` Name string `hcl:"name"`
Attrs hcl.Attributes `hcl:",remain"` Attrs hcl.Attributes `hcl:",remain"`
@@ -176,7 +176,7 @@ func TestDecodeBody(t *testing.T) {
0, 0,
}, },
{ {
map[string]any{ map[string]interface{}{
"name": "Ermintrude", "name": "Ermintrude",
"age": 50, "age": 50,
}, },
@@ -184,7 +184,7 @@ func TestDecodeBody(t *testing.T) {
Name string `hcl:"name"` Name string `hcl:"name"`
Remain hcl.Body `hcl:",remain"` Remain hcl.Body `hcl:",remain"`
}{}), }{}),
func(gotI any) bool { func(gotI interface{}) bool {
got := gotI.(struct { got := gotI.(struct {
Name string `hcl:"name"` Name string `hcl:"name"`
Remain hcl.Body `hcl:",remain"` Remain hcl.Body `hcl:",remain"`
@@ -197,7 +197,7 @@ func TestDecodeBody(t *testing.T) {
0, 0,
}, },
{ {
map[string]any{ map[string]interface{}{
"name": "Ermintrude", "name": "Ermintrude",
"living": true, "living": true,
}, },
@@ -217,7 +217,7 @@ func TestDecodeBody(t *testing.T) {
0, 0,
}, },
{ {
map[string]any{ map[string]interface{}{
"name": "Ermintrude", "name": "Ermintrude",
"age": 50, "age": 50,
}, },
@@ -226,7 +226,7 @@ func TestDecodeBody(t *testing.T) {
Body hcl.Body `hcl:",body"` Body hcl.Body `hcl:",body"`
Remain hcl.Body `hcl:",remain"` Remain hcl.Body `hcl:",remain"`
}{}), }{}),
func(gotI any) bool { func(gotI interface{}) bool {
got := gotI.(struct { got := gotI.(struct {
Name string `hcl:"name"` Name string `hcl:"name"`
Body hcl.Body `hcl:",body"` Body hcl.Body `hcl:",body"`
@@ -241,76 +241,76 @@ func TestDecodeBody(t *testing.T) {
0, 0,
}, },
{ {
map[string]any{ map[string]interface{}{
"noodle": map[string]any{}, "noodle": map[string]interface{}{},
}, },
makeInstantiateType(struct { makeInstantiateType(struct {
Noodle struct{} `hcl:"noodle,block"` Noodle struct{} `hcl:"noodle,block"`
}{}), }{}),
func(gotI any) bool { func(gotI interface{}) bool {
// Generating no diagnostics is good enough for this one. // Generating no diagnostics is good enough for this one.
return true return true
}, },
0, 0,
}, },
{ {
map[string]any{ map[string]interface{}{
"noodle": []map[string]any{{}}, "noodle": []map[string]interface{}{{}},
}, },
makeInstantiateType(struct { makeInstantiateType(struct {
Noodle struct{} `hcl:"noodle,block"` Noodle struct{} `hcl:"noodle,block"`
}{}), }{}),
func(gotI any) bool { func(gotI interface{}) bool {
// Generating no diagnostics is good enough for this one. // Generating no diagnostics is good enough for this one.
return true return true
}, },
0, 0,
}, },
{ {
map[string]any{ map[string]interface{}{
"noodle": []map[string]any{{}, {}}, "noodle": []map[string]interface{}{{}, {}},
}, },
makeInstantiateType(struct { makeInstantiateType(struct {
Noodle struct{} `hcl:"noodle,block"` Noodle struct{} `hcl:"noodle,block"`
}{}), }{}),
func(gotI any) bool { func(gotI interface{}) bool {
// Generating one diagnostic is good enough for this one. // Generating one diagnostic is good enough for this one.
return true return true
}, },
1, 1,
}, },
{ {
map[string]any{}, map[string]interface{}{},
makeInstantiateType(struct { makeInstantiateType(struct {
Noodle struct{} `hcl:"noodle,block"` Noodle struct{} `hcl:"noodle,block"`
}{}), }{}),
func(gotI any) bool { func(gotI interface{}) bool {
// Generating one diagnostic is good enough for this one. // Generating one diagnostic is good enough for this one.
return true return true
}, },
1, 1,
}, },
{ {
map[string]any{ map[string]interface{}{
"noodle": []map[string]any{}, "noodle": []map[string]interface{}{},
}, },
makeInstantiateType(struct { makeInstantiateType(struct {
Noodle struct{} `hcl:"noodle,block"` Noodle struct{} `hcl:"noodle,block"`
}{}), }{}),
func(gotI any) bool { func(gotI interface{}) bool {
// Generating one diagnostic is good enough for this one. // Generating one diagnostic is good enough for this one.
return true return true
}, },
1, 1,
}, },
{ {
map[string]any{ map[string]interface{}{
"noodle": map[string]any{}, "noodle": map[string]interface{}{},
}, },
makeInstantiateType(struct { makeInstantiateType(struct {
Noodle *struct{} `hcl:"noodle,block"` Noodle *struct{} `hcl:"noodle,block"`
}{}), }{}),
func(gotI any) bool { func(gotI interface{}) bool {
return gotI.(struct { return gotI.(struct {
Noodle *struct{} `hcl:"noodle,block"` Noodle *struct{} `hcl:"noodle,block"`
}).Noodle != nil }).Noodle != nil
@@ -318,13 +318,13 @@ func TestDecodeBody(t *testing.T) {
0, 0,
}, },
{ {
map[string]any{ map[string]interface{}{
"noodle": []map[string]any{{}}, "noodle": []map[string]interface{}{{}},
}, },
makeInstantiateType(struct { makeInstantiateType(struct {
Noodle *struct{} `hcl:"noodle,block"` Noodle *struct{} `hcl:"noodle,block"`
}{}), }{}),
func(gotI any) bool { func(gotI interface{}) bool {
return gotI.(struct { return gotI.(struct {
Noodle *struct{} `hcl:"noodle,block"` Noodle *struct{} `hcl:"noodle,block"`
}).Noodle != nil }).Noodle != nil
@@ -332,13 +332,13 @@ func TestDecodeBody(t *testing.T) {
0, 0,
}, },
{ {
map[string]any{ map[string]interface{}{
"noodle": []map[string]any{}, "noodle": []map[string]interface{}{},
}, },
makeInstantiateType(struct { makeInstantiateType(struct {
Noodle *struct{} `hcl:"noodle,block"` Noodle *struct{} `hcl:"noodle,block"`
}{}), }{}),
func(gotI any) bool { func(gotI interface{}) bool {
return gotI.(struct { return gotI.(struct {
Noodle *struct{} `hcl:"noodle,block"` Noodle *struct{} `hcl:"noodle,block"`
}).Noodle == nil }).Noodle == nil
@@ -346,26 +346,26 @@ func TestDecodeBody(t *testing.T) {
0, 0,
}, },
{ {
map[string]any{ map[string]interface{}{
"noodle": []map[string]any{{}, {}}, "noodle": []map[string]interface{}{{}, {}},
}, },
makeInstantiateType(struct { makeInstantiateType(struct {
Noodle *struct{} `hcl:"noodle,block"` Noodle *struct{} `hcl:"noodle,block"`
}{}), }{}),
func(gotI any) bool { func(gotI interface{}) bool {
// Generating one diagnostic is good enough for this one. // Generating one diagnostic is good enough for this one.
return true return true
}, },
1, 1,
}, },
{ {
map[string]any{ map[string]interface{}{
"noodle": []map[string]any{}, "noodle": []map[string]interface{}{},
}, },
makeInstantiateType(struct { makeInstantiateType(struct {
Noodle []struct{} `hcl:"noodle,block"` Noodle []struct{} `hcl:"noodle,block"`
}{}), }{}),
func(gotI any) bool { func(gotI interface{}) bool {
noodle := gotI.(struct { noodle := gotI.(struct {
Noodle []struct{} `hcl:"noodle,block"` Noodle []struct{} `hcl:"noodle,block"`
}).Noodle }).Noodle
@@ -374,13 +374,13 @@ func TestDecodeBody(t *testing.T) {
0, 0,
}, },
{ {
map[string]any{ map[string]interface{}{
"noodle": []map[string]any{{}}, "noodle": []map[string]interface{}{{}},
}, },
makeInstantiateType(struct { makeInstantiateType(struct {
Noodle []struct{} `hcl:"noodle,block"` Noodle []struct{} `hcl:"noodle,block"`
}{}), }{}),
func(gotI any) bool { func(gotI interface{}) bool {
noodle := gotI.(struct { noodle := gotI.(struct {
Noodle []struct{} `hcl:"noodle,block"` Noodle []struct{} `hcl:"noodle,block"`
}).Noodle }).Noodle
@@ -389,13 +389,13 @@ func TestDecodeBody(t *testing.T) {
0, 0,
}, },
{ {
map[string]any{ map[string]interface{}{
"noodle": []map[string]any{{}, {}}, "noodle": []map[string]interface{}{{}, {}},
}, },
makeInstantiateType(struct { makeInstantiateType(struct {
Noodle []struct{} `hcl:"noodle,block"` Noodle []struct{} `hcl:"noodle,block"`
}{}), }{}),
func(gotI any) bool { func(gotI interface{}) bool {
noodle := gotI.(struct { noodle := gotI.(struct {
Noodle []struct{} `hcl:"noodle,block"` Noodle []struct{} `hcl:"noodle,block"`
}).Noodle }).Noodle
@@ -404,15 +404,15 @@ func TestDecodeBody(t *testing.T) {
0, 0,
}, },
{ {
map[string]any{ map[string]interface{}{
"noodle": map[string]any{}, "noodle": map[string]interface{}{},
}, },
makeInstantiateType(struct { makeInstantiateType(struct {
Noodle struct { Noodle struct {
Name string `hcl:"name,label"` Name string `hcl:"name,label"`
} `hcl:"noodle,block"` } `hcl:"noodle,block"`
}{}), }{}),
func(gotI any) bool { func(gotI interface{}) bool {
//nolint:misspell //nolint:misspell
// Generating two diagnostics is good enough for this one. // Generating two diagnostics is good enough for this one.
// (one for the missing noodle block and the other for // (one for the missing noodle block and the other for
@@ -423,9 +423,9 @@ func TestDecodeBody(t *testing.T) {
2, 2,
}, },
{ {
map[string]any{ map[string]interface{}{
"noodle": map[string]any{ "noodle": map[string]interface{}{
"foo_foo": map[string]any{}, "foo_foo": map[string]interface{}{},
}, },
}, },
makeInstantiateType(struct { makeInstantiateType(struct {
@@ -433,7 +433,7 @@ func TestDecodeBody(t *testing.T) {
Name string `hcl:"name,label"` Name string `hcl:"name,label"`
} `hcl:"noodle,block"` } `hcl:"noodle,block"`
}{}), }{}),
func(gotI any) bool { func(gotI interface{}) bool {
noodle := gotI.(struct { noodle := gotI.(struct {
Noodle struct { Noodle struct {
Name string `hcl:"name,label"` Name string `hcl:"name,label"`
@@ -444,10 +444,10 @@ func TestDecodeBody(t *testing.T) {
0, 0,
}, },
{ {
map[string]any{ map[string]interface{}{
"noodle": map[string]any{ "noodle": map[string]interface{}{
"foo_foo": map[string]any{}, "foo_foo": map[string]interface{}{},
"bar_baz": map[string]any{}, "bar_baz": map[string]interface{}{},
}, },
}, },
makeInstantiateType(struct { makeInstantiateType(struct {
@@ -455,17 +455,17 @@ func TestDecodeBody(t *testing.T) {
Name string `hcl:"name,label"` Name string `hcl:"name,label"`
} `hcl:"noodle,block"` } `hcl:"noodle,block"`
}{}), }{}),
func(gotI any) bool { func(gotI interface{}) bool {
// One diagnostic is enough for this one. // One diagnostic is enough for this one.
return true return true
}, },
1, 1,
}, },
{ {
map[string]any{ map[string]interface{}{
"noodle": map[string]any{ "noodle": map[string]interface{}{
"foo_foo": map[string]any{}, "foo_foo": map[string]interface{}{},
"bar_baz": map[string]any{}, "bar_baz": map[string]interface{}{},
}, },
}, },
makeInstantiateType(struct { makeInstantiateType(struct {
@@ -473,7 +473,7 @@ func TestDecodeBody(t *testing.T) {
Name string `hcl:"name,label"` Name string `hcl:"name,label"`
} `hcl:"noodle,block"` } `hcl:"noodle,block"`
}{}), }{}),
func(gotI any) bool { func(gotI interface{}) bool {
noodles := gotI.(struct { noodles := gotI.(struct {
Noodles []struct { Noodles []struct {
Name string `hcl:"name,label"` Name string `hcl:"name,label"`
@@ -484,9 +484,9 @@ func TestDecodeBody(t *testing.T) {
0, 0,
}, },
{ {
map[string]any{ map[string]interface{}{
"noodle": map[string]any{ "noodle": map[string]interface{}{
"foo_foo": map[string]any{ "foo_foo": map[string]interface{}{
"type": "rice", "type": "rice",
}, },
}, },
@@ -497,7 +497,7 @@ func TestDecodeBody(t *testing.T) {
Type string `hcl:"type"` Type string `hcl:"type"`
} `hcl:"noodle,block"` } `hcl:"noodle,block"`
}{}), }{}),
func(gotI any) bool { func(gotI interface{}) bool {
noodle := gotI.(struct { noodle := gotI.(struct {
Noodle struct { Noodle struct {
Name string `hcl:"name,label"` Name string `hcl:"name,label"`
@@ -510,7 +510,7 @@ func TestDecodeBody(t *testing.T) {
}, },
{ {
map[string]any{ map[string]interface{}{
"name": "Ermintrude", "name": "Ermintrude",
"age": 34, "age": 34,
}, },
@@ -522,31 +522,31 @@ func TestDecodeBody(t *testing.T) {
0, 0,
}, },
{ {
map[string]any{ map[string]interface{}{
"name": "Ermintrude", "name": "Ermintrude",
"age": 89, "age": 89,
}, },
makeInstantiateType(map[string]*hcl.Attribute(nil)), makeInstantiateType(map[string]*hcl.Attribute(nil)),
func(gotI any) bool { func(gotI interface{}) bool {
got := gotI.(map[string]*hcl.Attribute) got := gotI.(map[string]*hcl.Attribute)
return len(got) == 2 && got["name"] != nil && got["age"] != nil return len(got) == 2 && got["name"] != nil && got["age"] != nil
}, },
0, 0,
}, },
{ {
map[string]any{ map[string]interface{}{
"name": "Ermintrude", "name": "Ermintrude",
"age": 13, "age": 13,
}, },
makeInstantiateType(map[string]hcl.Expression(nil)), makeInstantiateType(map[string]hcl.Expression(nil)),
func(gotI any) bool { func(gotI interface{}) bool {
got := gotI.(map[string]hcl.Expression) got := gotI.(map[string]hcl.Expression)
return len(got) == 2 && got["name"] != nil && got["age"] != nil return len(got) == 2 && got["name"] != nil && got["age"] != nil
}, },
0, 0,
}, },
{ {
map[string]any{ map[string]interface{}{
"name": "Ermintrude", "name": "Ermintrude",
"living": true, "living": true,
}, },
@@ -559,10 +559,10 @@ func TestDecodeBody(t *testing.T) {
}, },
{ {
// Retain "nested" block while decoding // Retain "nested" block while decoding
map[string]any{ map[string]interface{}{
"plain": "foo", "plain": "foo",
}, },
func() any { func() interface{} {
return &withNestedBlock{ return &withNestedBlock{
Plain: "bar", Plain: "bar",
Nested: &withTwoAttributes{ Nested: &withTwoAttributes{
@@ -570,7 +570,7 @@ func TestDecodeBody(t *testing.T) {
}, },
} }
}, },
func(gotI any) bool { func(gotI interface{}) bool {
foo := gotI.(withNestedBlock) foo := gotI.(withNestedBlock)
return foo.Plain == "foo" && foo.Nested != nil && foo.Nested.A == "bar" return foo.Plain == "foo" && foo.Nested != nil && foo.Nested.A == "bar"
}, },
@@ -578,19 +578,19 @@ func TestDecodeBody(t *testing.T) {
}, },
{ {
// Retain values in "nested" block while decoding // Retain values in "nested" block while decoding
map[string]any{ map[string]interface{}{
"nested": map[string]any{ "nested": map[string]interface{}{
"a": "foo", "a": "foo",
}, },
}, },
func() any { func() interface{} {
return &withNestedBlock{ return &withNestedBlock{
Nested: &withTwoAttributes{ Nested: &withTwoAttributes{
B: "bar", B: "bar",
}, },
} }
}, },
func(gotI any) bool { func(gotI interface{}) bool {
foo := gotI.(withNestedBlock) foo := gotI.(withNestedBlock)
return foo.Nested.A == "foo" && foo.Nested.B == "bar" return foo.Nested.A == "foo" && foo.Nested.B == "bar"
}, },
@@ -598,14 +598,14 @@ func TestDecodeBody(t *testing.T) {
}, },
{ {
// Retain values in "nested" block list while decoding // Retain values in "nested" block list while decoding
map[string]any{ map[string]interface{}{
"nested": []map[string]any{ "nested": []map[string]interface{}{
{ {
"a": "foo", "a": "foo",
}, },
}, },
}, },
func() any { func() interface{} {
return &withListofNestedBlocks{ return &withListofNestedBlocks{
Nested: []*withTwoAttributes{ Nested: []*withTwoAttributes{
{ {
@@ -614,7 +614,7 @@ func TestDecodeBody(t *testing.T) {
}, },
} }
}, },
func(gotI any) bool { func(gotI interface{}) bool {
n := gotI.(withListofNestedBlocks) n := gotI.(withListofNestedBlocks)
return n.Nested[0].A == "foo" && n.Nested[0].B == "bar" return n.Nested[0].A == "foo" && n.Nested[0].B == "bar"
}, },
@@ -622,14 +622,14 @@ func TestDecodeBody(t *testing.T) {
}, },
{ {
// Remove additional elements from the list while decoding nested blocks // Remove additional elements from the list while decoding nested blocks
map[string]any{ map[string]interface{}{
"nested": []map[string]any{ "nested": []map[string]interface{}{
{ {
"a": "foo", "a": "foo",
}, },
}, },
}, },
func() any { func() interface{} {
return &withListofNestedBlocks{ return &withListofNestedBlocks{
Nested: []*withTwoAttributes{ Nested: []*withTwoAttributes{
{ {
@@ -641,7 +641,7 @@ func TestDecodeBody(t *testing.T) {
}, },
} }
}, },
func(gotI any) bool { func(gotI interface{}) bool {
n := gotI.(withListofNestedBlocks) n := gotI.(withListofNestedBlocks)
return len(n.Nested) == 1 return len(n.Nested) == 1
}, },
@@ -649,8 +649,8 @@ func TestDecodeBody(t *testing.T) {
}, },
{ {
// Make sure decoding value slices works the same as pointer slices. // Make sure decoding value slices works the same as pointer slices.
map[string]any{ map[string]interface{}{
"nested": []map[string]any{ "nested": []map[string]interface{}{
{ {
"b": "bar", "b": "bar",
}, },
@@ -659,7 +659,7 @@ func TestDecodeBody(t *testing.T) {
}, },
}, },
}, },
func() any { func() interface{} {
return &withListofNestedBlocksNoPointers{ return &withListofNestedBlocksNoPointers{
Nested: []withTwoAttributes{ Nested: []withTwoAttributes{
{ {
@@ -668,7 +668,7 @@ func TestDecodeBody(t *testing.T) {
}, },
} }
}, },
func(gotI any) bool { func(gotI interface{}) bool {
n := gotI.(withListofNestedBlocksNoPointers) n := gotI.(withListofNestedBlocksNoPointers)
return n.Nested[0].B == "bar" && len(n.Nested) == 2 return n.Nested[0].B == "bar" && len(n.Nested) == 2
}, },
@@ -710,8 +710,8 @@ func TestDecodeBody(t *testing.T) {
func TestDecodeExpression(t *testing.T) { func TestDecodeExpression(t *testing.T) {
tests := []struct { tests := []struct {
Value cty.Value Value cty.Value
Target any Target interface{}
Want any Want interface{}
DiagCount int DiagCount int
}{ }{
{ {
@@ -799,8 +799,8 @@ func (e *fixedExpression) Variables() []hcl.Traversal {
return nil return nil
} }
func makeInstantiateType(target any) func() any { func makeInstantiateType(target interface{}) func() interface{} {
return func() any { return func() interface{} {
return reflect.New(reflect.TypeOf(target)).Interface() return reflect.New(reflect.TypeOf(target)).Interface()
} }
} }

View File

@@ -34,9 +34,9 @@ import (
// The layout of the resulting HCL source is derived from the ordering of // The layout of the resulting HCL source is derived from the ordering of
// the struct fields, with blank lines around nested blocks of different types. // the struct fields, with blank lines around nested blocks of different types.
// Fields representing attributes should usually precede those representing // Fields representing attributes should usually precede those representing
// blocks so that the attributes can group together in the result. For more // blocks so that the attributes can group togather in the result. For more
// control, use the hclwrite API directly. // control, use the hclwrite API directly.
func EncodeIntoBody(val any, dst *hclwrite.Body) { func EncodeIntoBody(val interface{}, dst *hclwrite.Body) {
rv := reflect.ValueOf(val) rv := reflect.ValueOf(val)
ty := rv.Type() ty := rv.Type()
if ty.Kind() == reflect.Ptr { if ty.Kind() == reflect.Ptr {
@@ -60,7 +60,7 @@ func EncodeIntoBody(val any, dst *hclwrite.Body) {
// //
// This function has the same constraints as EncodeIntoBody and will panic // This function has the same constraints as EncodeIntoBody and will panic
// if they are violated. // if they are violated.
func EncodeAsBlock(val any, blockType string) *hclwrite.Block { func EncodeAsBlock(val interface{}, blockType string) *hclwrite.Block {
rv := reflect.ValueOf(val) rv := reflect.ValueOf(val)
ty := rv.Type() ty := rv.Type()
if ty.Kind() == reflect.Ptr { if ty.Kind() == reflect.Ptr {
@@ -158,7 +158,7 @@ func populateBody(rv reflect.Value, ty reflect.Type, tags *fieldTags, dst *hclwr
if isSeq { if isSeq {
l := fieldVal.Len() l := fieldVal.Len()
for i := range l { for i := 0; i < l; i++ {
elemVal := fieldVal.Index(i) elemVal := fieldVal.Index(i)
if !elemVal.IsValid() { if !elemVal.IsValid() {
continue // ignore (elem value is nil pointer) continue // ignore (elem value is nil pointer)

View File

@@ -22,7 +22,7 @@ import (
// This uses the tags on the fields of the struct to discover how each // This uses the tags on the fields of the struct to discover how each
// field's value should be expressed within configuration. If an invalid // field's value should be expressed within configuration. If an invalid
// mapping is attempted, this function will panic. // mapping is attempted, this function will panic.
func ImpliedBodySchema(val any) (schema *hcl.BodySchema, partial bool) { func ImpliedBodySchema(val interface{}) (schema *hcl.BodySchema, partial bool) {
ty := reflect.TypeOf(val) ty := reflect.TypeOf(val)
if ty.Kind() == reflect.Ptr { if ty.Kind() == reflect.Ptr {
@@ -134,7 +134,7 @@ func getFieldTags(ty reflect.Type) *fieldTags {
} }
ct := ty.NumField() ct := ty.NumField()
for i := range ct { for i := 0; i < ct; i++ {
field := ty.Field(i) field := ty.Field(i)
tag := field.Tag.Get("hcl") tag := field.Tag.Get("hcl")
if tag == "" { if tag == "" {

View File

@@ -14,7 +14,7 @@ import (
func TestImpliedBodySchema(t *testing.T) { func TestImpliedBodySchema(t *testing.T) {
tests := []struct { tests := []struct {
val any val interface{}
wantSchema *hcl.BodySchema wantSchema *hcl.BodySchema
wantPartial bool wantPartial bool
}{ }{

View File

@@ -7,7 +7,6 @@ import (
"math" "math"
"math/big" "math/big"
"reflect" "reflect"
"slices"
"strconv" "strconv"
"strings" "strings"
@@ -16,7 +15,6 @@ import (
"github.com/hashicorp/hcl/v2" "github.com/hashicorp/hcl/v2"
"github.com/pkg/errors" "github.com/pkg/errors"
"github.com/zclconf/go-cty/cty" "github.com/zclconf/go-cty/cty"
"github.com/zclconf/go-cty/cty/convert"
) )
type Opt struct { type Opt struct {
@@ -556,57 +554,27 @@ func (p *parser) resolveBlockNames(block *hcl.Block) ([]string, error) {
func (p *parser) validateVariables(vars map[string]*variable, ectx *hcl.EvalContext) hcl.Diagnostics { func (p *parser) validateVariables(vars map[string]*variable, ectx *hcl.EvalContext) hcl.Diagnostics {
var diags hcl.Diagnostics var diags hcl.Diagnostics
for _, v := range vars { for _, v := range vars {
for _, rule := range v.Validations { for _, validation := range v.Validations {
resultVal, condDiags := rule.Condition.Value(ectx) condition, condDiags := validation.Condition.Value(ectx)
if condDiags.HasErrors() { if condDiags.HasErrors() {
diags = append(diags, condDiags...) diags = append(diags, condDiags...)
continue continue
} }
if !condition.True() {
if resultVal.IsNull() { message, msgDiags := validation.ErrorMessage.Value(ectx)
diags = append(diags, &hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: "Invalid condition result",
Detail: "Condition expression must return either true or false, not null.",
Subject: rule.Condition.Range().Ptr(),
Expression: rule.Condition,
})
continue
}
var err error
resultVal, err = convert.Convert(resultVal, cty.Bool)
if err != nil {
diags = append(diags, &hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: "Invalid condition result",
Detail: fmt.Sprintf("Invalid condition result value: %s", err),
Subject: rule.Condition.Range().Ptr(),
Expression: rule.Condition,
})
continue
}
if !resultVal.True() {
message, msgDiags := rule.ErrorMessage.Value(ectx)
if msgDiags.HasErrors() { if msgDiags.HasErrors() {
diags = append(diags, msgDiags...) diags = append(diags, msgDiags...)
continue continue
} }
errorMessage := "This check failed, but has an invalid error message."
if !message.IsNull() {
errorMessage = message.AsString()
}
diags = append(diags, &hcl.Diagnostic{ diags = append(diags, &hcl.Diagnostic{
Severity: hcl.DiagError, Severity: hcl.DiagError,
Summary: "Validation failed", Summary: "Validation failed",
Detail: errorMessage, Detail: message.AsString(),
Subject: rule.Condition.Range().Ptr(), Subject: validation.Condition.Range().Ptr(),
}) })
} }
} }
} }
return diags return diags
} }
@@ -621,7 +589,7 @@ type ParseMeta struct {
AllVariables []*Variable AllVariables []*Variable
} }
func Parse(b hcl.Body, opt Opt, val any) (*ParseMeta, hcl.Diagnostics) { func Parse(b hcl.Body, opt Opt, val interface{}) (*ParseMeta, hcl.Diagnostics) {
reserved := map[string]struct{}{} reserved := map[string]struct{}{}
schema, _ := gohcl.ImpliedBodySchema(val) schema, _ := gohcl.ImpliedBodySchema(val)
@@ -795,7 +763,7 @@ func Parse(b hcl.Body, opt Opt, val any) (*ParseMeta, hcl.Diagnostics) {
types := map[string]field{} types := map[string]field{}
renamed := map[string]map[string][]string{} renamed := map[string]map[string][]string{}
vt := reflect.ValueOf(val).Elem().Type() vt := reflect.ValueOf(val).Elem().Type()
for i := range vt.NumField() { for i := 0; i < vt.NumField(); i++ {
tags := strings.Split(vt.Field(i).Tag.Get("hcl"), ",") tags := strings.Split(vt.Field(i).Tag.Get("hcl"), ",")
p.blockTypes[tags[0]] = vt.Field(i).Type.Elem().Elem() p.blockTypes[tags[0]] = vt.Field(i).Type.Elem().Elem()
@@ -863,7 +831,7 @@ func Parse(b hcl.Body, opt Opt, val any) (*ParseMeta, hcl.Diagnostics) {
oldValue, exists := t.values[lblName] oldValue, exists := t.values[lblName]
if !exists && lblExists { if !exists && lblExists {
if v.Elem().Field(t.idx).Type().Kind() == reflect.Slice { if v.Elem().Field(t.idx).Type().Kind() == reflect.Slice {
for i := range v.Elem().Field(t.idx).Len() { for i := 0; i < v.Elem().Field(t.idx).Len(); i++ {
if lblName == v.Elem().Field(t.idx).Index(i).Elem().Field(lblIndex).String() { if lblName == v.Elem().Field(t.idx).Index(i).Elem().Field(lblIndex).String() {
exists = true exists = true
oldValue = value{Value: v.Elem().Field(t.idx).Index(i), idx: i} oldValue = value{Value: v.Elem().Field(t.idx).Index(i), idx: i}
@@ -930,7 +898,7 @@ func wrapErrorDiagnostic(message string, err error, subject *hcl.Range, context
func setName(v reflect.Value, name string) { func setName(v reflect.Value, name string) {
numFields := v.Elem().Type().NumField() numFields := v.Elem().Type().NumField()
for i := range numFields { for i := 0; i < numFields; i++ {
parts := strings.Split(v.Elem().Type().Field(i).Tag.Get("hcl"), ",") parts := strings.Split(v.Elem().Type().Field(i).Tag.Get("hcl"), ",")
for _, t := range parts[1:] { for _, t := range parts[1:] {
if t == "label" { if t == "label" {
@@ -942,10 +910,12 @@ func setName(v reflect.Value, name string) {
func getName(v reflect.Value) (string, bool) { func getName(v reflect.Value) (string, bool) {
numFields := v.Elem().Type().NumField() numFields := v.Elem().Type().NumField()
for i := range numFields { for i := 0; i < numFields; i++ {
parts := strings.Split(v.Elem().Type().Field(i).Tag.Get("hcl"), ",") parts := strings.Split(v.Elem().Type().Field(i).Tag.Get("hcl"), ",")
if slices.Contains(parts[1:], "label") { for _, t := range parts[1:] {
return v.Elem().Field(i).String(), true if t == "label" {
return v.Elem().Field(i).String(), true
}
} }
} }
return "", false return "", false
@@ -953,10 +923,12 @@ func getName(v reflect.Value) (string, bool) {
func getNameIndex(v reflect.Value) (int, bool) { func getNameIndex(v reflect.Value) (int, bool) {
numFields := v.Elem().Type().NumField() numFields := v.Elem().Type().NumField()
for i := range numFields { for i := 0; i < numFields; i++ {
parts := strings.Split(v.Elem().Type().Field(i).Tag.Get("hcl"), ",") parts := strings.Split(v.Elem().Type().Field(i).Tag.Get("hcl"), ",")
if slices.Contains(parts[1:], "label") { for _, t := range parts[1:] {
return i, true if t == "label" {
return i, true
}
} }
} }
return 0, false return 0, false
@@ -1016,7 +988,7 @@ func key(ks ...any) uint64 {
return hash.Sum64() return hash.Sum64()
} }
func decodeBody(body hcl.Body, ctx *hcl.EvalContext, val any) hcl.Diagnostics { func decodeBody(body hcl.Body, ctx *hcl.EvalContext, val interface{}) hcl.Diagnostics {
dec := gohcl.DecodeOptions{ImpliedType: ImpliedType} dec := gohcl.DecodeOptions{ImpliedType: ImpliedType}
return dec.DecodeBody(body, ctx, val) return dec.DecodeBody(body, ctx, val)
} }

View File

@@ -43,7 +43,7 @@ import (
// In particular, ImpliedType will never use capsule types in its returned // In particular, ImpliedType will never use capsule types in its returned
// type, because it cannot know the capsule types supported by the calling // type, because it cannot know the capsule types supported by the calling
// program. // program.
func ImpliedType(gv any) (cty.Type, error) { func ImpliedType(gv interface{}) (cty.Type, error) {
rt := reflect.TypeOf(gv) rt := reflect.TypeOf(gv)
var path cty.Path var path cty.Path
return impliedType(rt, path) return impliedType(rt, path)
@@ -148,7 +148,7 @@ func structTagIndices(st reflect.Type) map[string]int {
ct := st.NumField() ct := st.NumField()
ret := make(map[string]int, ct) ret := make(map[string]int, ct)
for i := range ct { for i := 0; i < ct; i++ {
field := st.Field(i) field := st.Field(i)
attrName := field.Tag.Get("cty") attrName := field.Tag.Get("cty")
if attrName != "" { if attrName != "" {

View File

@@ -8,7 +8,6 @@ import (
"encoding/json" "encoding/json"
"fmt" "fmt"
"io" "io"
"maps"
"os" "os"
"slices" "slices"
"strconv" "strconv"
@@ -41,6 +40,7 @@ import (
"github.com/moby/buildkit/solver/errdefs" "github.com/moby/buildkit/solver/errdefs"
"github.com/moby/buildkit/solver/pb" "github.com/moby/buildkit/solver/pb"
spb "github.com/moby/buildkit/sourcepolicy/pb" spb "github.com/moby/buildkit/sourcepolicy/pb"
"github.com/moby/buildkit/util/entitlements"
"github.com/moby/buildkit/util/progress/progresswriter" "github.com/moby/buildkit/util/progress/progresswriter"
"github.com/moby/buildkit/util/tracing" "github.com/moby/buildkit/util/tracing"
"github.com/opencontainers/go-digest" "github.com/opencontainers/go-digest"
@@ -63,7 +63,7 @@ type Options struct {
Inputs Inputs Inputs Inputs
Ref string Ref string
Allow []string Allow []entitlements.Entitlement
Attests map[string]*string Attests map[string]*string
BuildArgs map[string]string BuildArgs map[string]string
CacheFrom []client.CacheOptionsEntry CacheFrom []client.CacheOptionsEntry
@@ -432,7 +432,9 @@ func BuildWithResultHandler(ctx context.Context, nodes []builder.Node, opts map[
FrontendInputs: frontendInputs, FrontendInputs: frontendInputs,
FrontendOpt: make(map[string]string), FrontendOpt: make(map[string]string),
} }
maps.Copy(req.FrontendOpt, so.FrontendAttrs) for k, v := range so.FrontendAttrs {
req.FrontendOpt[k] = v
}
so.Frontend = "" so.Frontend = ""
so.FrontendInputs = nil so.FrontendInputs = nil
@@ -538,7 +540,7 @@ func BuildWithResultHandler(ctx context.Context, nodes []builder.Node, opts map[
node := dp.Node().Driver node := dp.Node().Driver
if node.IsMobyDriver() { if node.IsMobyDriver() {
for _, e := range so.Exports { for _, e := range so.Exports {
if e.Type == "moby" && e.Attrs["push"] != "" && !node.Features(ctx)[driver.DirectPush] { if e.Type == "moby" && e.Attrs["push"] != "" {
if ok, _ := strconv.ParseBool(e.Attrs["push"]); ok { if ok, _ := strconv.ParseBool(e.Attrs["push"]); ok {
pushNames = e.Attrs["name"] pushNames = e.Attrs["name"]
if pushNames == "" { if pushNames == "" {
@@ -621,7 +623,7 @@ func BuildWithResultHandler(ctx context.Context, nodes []builder.Node, opts map[
// This is fallback for some very old buildkit versions. // This is fallback for some very old buildkit versions.
// Note that the mediatype isn't really correct as most of the time it is image manifest and // Note that the mediatype isn't really correct as most of the time it is image manifest and
// not manifest list but actually both are handled because for Docker mediatypes the // not manifest list but actually both are handled because for Docker mediatypes the
// mediatype value in the Accept header does not seem to matter. // mediatype value in the Accpet header does not seem to matter.
s, ok = r.ExporterResponse[exptypes.ExporterImageDigestKey] s, ok = r.ExporterResponse[exptypes.ExporterImageDigestKey]
if ok { if ok {
descs = append(descs, specs.Descriptor{ descs = append(descs, specs.Descriptor{
@@ -833,7 +835,7 @@ func remoteDigestWithMoby(ctx context.Context, d *driver.DriverHandle, name stri
if err != nil { if err != nil {
return "", err return "", err
} }
img, err := api.ImageInspect(ctx, name) img, _, err := api.ImageInspectWithRaw(ctx, name)
if err != nil { if err != nil {
return "", err return "", err
} }

View File

@@ -4,7 +4,6 @@ import (
"context" "context"
stderrors "errors" stderrors "errors"
"net" "net"
"slices"
"github.com/containerd/platforms" "github.com/containerd/platforms"
"github.com/docker/buildx/builder" "github.com/docker/buildx/builder"
@@ -38,7 +37,15 @@ func Dial(ctx context.Context, nodes []builder.Node, pw progress.Writer, platfor
for _, ls := range resolved { for _, ls := range resolved {
for _, rn := range ls { for _, rn := range ls {
if platform != nil { if platform != nil {
if !slices.ContainsFunc(rn.platforms, platforms.Only(*platform).Match) { p := *platform
var found bool
for _, pp := range rn.platforms {
if platforms.Only(p).Match(pp) {
found = true
break
}
}
if !found {
continue continue
} }
} }

View File

@@ -3,7 +3,6 @@ package build
import ( import (
"context" "context"
"fmt" "fmt"
"slices"
"sync" "sync"
"github.com/containerd/platforms" "github.com/containerd/platforms"
@@ -222,7 +221,7 @@ func (r *nodeResolver) get(p specs.Platform, matcher matchMaker, additionalPlatf
for i, node := range r.nodes { for i, node := range r.nodes {
platforms := node.Platforms platforms := node.Platforms
if additionalPlatforms != nil { if additionalPlatforms != nil {
platforms = slices.Clone(platforms) platforms = append([]specs.Platform{}, platforms...)
platforms = append(platforms, additionalPlatforms(i, node)...) platforms = append(platforms, additionalPlatforms(i, node)...)
} }
for _, p2 := range platforms { for _, p2 := range platforms {

View File

@@ -2,7 +2,6 @@ package build
import ( import (
"context" "context"
"maps"
"os" "os"
"path" "path"
"path/filepath" "path/filepath"
@@ -128,7 +127,9 @@ func getGitAttributes(ctx context.Context, contextPath, dockerfilePath string) (
if so.FrontendAttrs == nil { if so.FrontendAttrs == nil {
so.FrontendAttrs = make(map[string]string) so.FrontendAttrs = make(map[string]string)
} }
maps.Copy(so.FrontendAttrs, res) for k, v := range res {
so.FrontendAttrs[k] = v
}
if !setGitInfo || root == "" { if !setGitInfo || root == "" {
return return

View File

@@ -9,7 +9,6 @@ import (
"testing" "testing"
"github.com/docker/buildx/util/gitutil" "github.com/docker/buildx/util/gitutil"
"github.com/docker/buildx/util/gitutil/gittestutil"
"github.com/moby/buildkit/client" "github.com/moby/buildkit/client"
specs "github.com/opencontainers/image-spec/specs-go/v1" specs "github.com/opencontainers/image-spec/specs-go/v1"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
@@ -17,18 +16,18 @@ import (
) )
func setupTest(tb testing.TB) { func setupTest(tb testing.TB) {
gittestutil.Mktmp(tb) gitutil.Mktmp(tb)
c, err := gitutil.New() c, err := gitutil.New()
require.NoError(tb, err) require.NoError(tb, err)
gittestutil.GitInit(c, tb) gitutil.GitInit(c, tb)
df := []byte("FROM alpine:latest\n") df := []byte("FROM alpine:latest\n")
require.NoError(tb, os.WriteFile("Dockerfile", df, 0644)) require.NoError(tb, os.WriteFile("Dockerfile", df, 0644))
gittestutil.GitAdd(c, tb, "Dockerfile") gitutil.GitAdd(c, tb, "Dockerfile")
gittestutil.GitCommit(c, tb, "initial commit") gitutil.GitCommit(c, tb, "initial commit")
gittestutil.GitSetRemote(c, tb, "origin", "git@github.com:docker/buildx.git") gitutil.GitSetRemote(c, tb, "origin", "git@github.com:docker/buildx.git")
} }
func TestGetGitAttributesNotGitRepo(t *testing.T) { func TestGetGitAttributesNotGitRepo(t *testing.T) {
@@ -189,19 +188,19 @@ func TestLocalDirs(t *testing.T) {
} }
func TestLocalDirsSub(t *testing.T) { func TestLocalDirsSub(t *testing.T) {
gittestutil.Mktmp(t) gitutil.Mktmp(t)
c, err := gitutil.New() c, err := gitutil.New()
require.NoError(t, err) require.NoError(t, err)
gittestutil.GitInit(c, t) gitutil.GitInit(c, t)
df := []byte("FROM alpine:latest\n") df := []byte("FROM alpine:latest\n")
require.NoError(t, os.MkdirAll("app", 0755)) require.NoError(t, os.MkdirAll("app", 0755))
require.NoError(t, os.WriteFile("app/Dockerfile", df, 0644)) require.NoError(t, os.WriteFile("app/Dockerfile", df, 0644))
gittestutil.GitAdd(c, t, "app/Dockerfile") gitutil.GitAdd(c, t, "app/Dockerfile")
gittestutil.GitCommit(c, t, "initial commit") gitutil.GitCommit(c, t, "initial commit")
gittestutil.GitSetRemote(c, t, "origin", "git@github.com:docker/buildx.git") gitutil.GitSetRemote(c, t, "origin", "git@github.com:docker/buildx.git")
so := &client.SolveOpt{ so := &client.SolveOpt{
FrontendAttrs: map[string]string{}, FrontendAttrs: map[string]string{},

View File

@@ -318,7 +318,7 @@ func toSolveOpt(ctx context.Context, node builder.Node, multiDriver bool, opt *O
switch opt.NetworkMode { switch opt.NetworkMode {
case "host": case "host":
so.FrontendAttrs["force-network-mode"] = opt.NetworkMode so.FrontendAttrs["force-network-mode"] = opt.NetworkMode
so.AllowedEntitlements = append(so.AllowedEntitlements, entitlements.EntitlementNetworkHost.String()) so.AllowedEntitlements = append(so.AllowedEntitlements, entitlements.EntitlementNetworkHost)
case "none": case "none":
so.FrontendAttrs["force-network-mode"] = opt.NetworkMode so.FrontendAttrs["force-network-mode"] = opt.NetworkMode
case "", "default": case "", "default":

View File

@@ -5,7 +5,6 @@ import (
"encoding/base64" "encoding/base64"
"encoding/json" "encoding/json"
"io" "io"
"maps"
"strings" "strings"
"sync" "sync"
@@ -41,7 +40,9 @@ func setRecordProvenance(ctx context.Context, c *client.Client, sr *client.Solve
if err != nil { if err != nil {
return err return err
} }
maps.Copy(sr.ExporterResponse, res) for k, v := range res {
sr.ExporterResponse[k] = v
}
return nil return nil
}) })
} }

View File

@@ -28,11 +28,11 @@ func TestSyncMultiReaderParallel(t *testing.T) {
readers := make([]io.ReadCloser, numReaders) readers := make([]io.ReadCloser, numReaders)
for i := range numReaders { for i := 0; i < numReaders; i++ {
readers[i] = mr.NewReadCloser() readers[i] = mr.NewReadCloser()
} }
for i := range numReaders { for i := 0; i < numReaders; i++ {
wg.Add(1) wg.Add(1)
go func(readerId int) { go func(readerId int) {
defer wg.Done() defer wg.Done()

View File

@@ -5,7 +5,6 @@ import (
"encoding/json" "encoding/json"
"net/url" "net/url"
"os" "os"
"slices"
"sort" "sort"
"strings" "strings"
"sync" "sync"
@@ -200,7 +199,7 @@ func (b *Builder) Boot(ctx context.Context) (bool, error) {
err = err1 err = err1
} }
if err == nil && len(errCh) > 0 { if err == nil && len(errCh) == len(toBoot) {
return false, <-errCh return false, <-errCh
} }
return true, err return true, err
@@ -657,7 +656,13 @@ func parseBuildkitdFlags(inp string, driver string, driverOpts map[string]string
flags.StringArrayVar(&allowInsecureEntitlements, "allow-insecure-entitlement", nil, "") flags.StringArrayVar(&allowInsecureEntitlements, "allow-insecure-entitlement", nil, "")
_ = flags.Parse(res) _ = flags.Parse(res)
hasNetworkHostEntitlement := slices.Contains(allowInsecureEntitlements, "network.host") var hasNetworkHostEntitlement bool
for _, e := range allowInsecureEntitlements {
if e == "network.host" {
hasNetworkHostEntitlement = true
break
}
}
var hasNetworkHostEntitlementInConf bool var hasNetworkHostEntitlementInConf bool
if buildkitdConfigFile != "" { if buildkitdConfigFile != "" {
@@ -666,8 +671,11 @@ func parseBuildkitdFlags(inp string, driver string, driverOpts map[string]string
return nil, err return nil, err
} else if btoml != nil { } else if btoml != nil {
if ies := btoml.GetArray("insecure-entitlements"); ies != nil { if ies := btoml.GetArray("insecure-entitlements"); ies != nil {
if slices.Contains(ies.([]string), "network.host") { for _, e := range ies.([]string) {
hasNetworkHostEntitlementInConf = true if e == "network.host" {
hasNetworkHostEntitlementInConf = true
break
}
} }
} }
} }

View File

@@ -32,11 +32,10 @@ type Node struct {
Err error Err error
// worker settings // worker settings
IDs []string IDs []string
Platforms []ocispecs.Platform Platforms []ocispecs.Platform
GCPolicy []client.PruneInfo GCPolicy []client.PruneInfo
Labels map[string]string Labels map[string]string
CDIDevices []client.CDIDevice
} }
// Nodes returns nodes for this builder. // Nodes returns nodes for this builder.
@@ -169,7 +168,7 @@ func (b *Builder) LoadNodes(ctx context.Context, opts ...LoadNodesOption) (_ []N
// dynamic nodes are used in Kubernetes driver. // dynamic nodes are used in Kubernetes driver.
// Kubernetes' pods are dynamically mapped to BuildKit Nodes. // Kubernetes' pods are dynamically mapped to BuildKit Nodes.
if di.DriverInfo != nil && len(di.DriverInfo.DynamicNodes) > 0 { if di.DriverInfo != nil && len(di.DriverInfo.DynamicNodes) > 0 {
for i := range di.DriverInfo.DynamicNodes { for i := 0; i < len(di.DriverInfo.DynamicNodes); i++ {
diClone := di diClone := di
if pl := di.DriverInfo.DynamicNodes[i].Platforms; len(pl) > 0 { if pl := di.DriverInfo.DynamicNodes[i].Platforms; len(pl) > 0 {
diClone.Platforms = pl diClone.Platforms = pl
@@ -260,7 +259,6 @@ func (n *Node) loadData(ctx context.Context, clientOpt ...client.ClientOpt) erro
n.GCPolicy = w.GCPolicy n.GCPolicy = w.GCPolicy
n.Labels = w.Labels n.Labels = w.Labels
} }
n.CDIDevices = w.CDIDevices
} }
sort.Strings(n.IDs) sort.Strings(n.IDs)
n.Platforms = platformutil.Dedupe(n.Platforms) n.Platforms = platformutil.Dedupe(n.Platforms)

View File

@@ -4,17 +4,17 @@ import (
"context" "context"
"fmt" "fmt"
"os" "os"
"path/filepath"
"github.com/docker/buildx/commands" "github.com/docker/buildx/commands"
controllererrors "github.com/docker/buildx/controller/errdefs" controllererrors "github.com/docker/buildx/controller/errdefs"
"github.com/docker/buildx/util/desktop" "github.com/docker/buildx/util/desktop"
"github.com/docker/buildx/version" "github.com/docker/buildx/version"
"github.com/docker/cli/cli" "github.com/docker/cli/cli"
"github.com/docker/cli/cli-plugins/metadata" "github.com/docker/cli/cli-plugins/manager"
"github.com/docker/cli/cli-plugins/plugin" "github.com/docker/cli/cli-plugins/plugin"
"github.com/docker/cli/cli/command" "github.com/docker/cli/cli/command"
"github.com/docker/cli/cli/debug" "github.com/docker/cli/cli/debug"
cliflags "github.com/docker/cli/cli/flags"
"github.com/moby/buildkit/solver/errdefs" "github.com/moby/buildkit/solver/errdefs"
"github.com/moby/buildkit/util/stack" "github.com/moby/buildkit/util/stack"
"github.com/pkg/errors" "github.com/pkg/errors"
@@ -36,9 +36,12 @@ func init() {
} }
func runStandalone(cmd *command.DockerCli) error { func runStandalone(cmd *command.DockerCli) error {
if err := cmd.Initialize(cliflags.NewClientOptions()); err != nil {
return err
}
defer flushMetrics(cmd) defer flushMetrics(cmd)
executable := os.Args[0]
rootCmd := commands.NewRootCmd(filepath.Base(executable), false, cmd) rootCmd := commands.NewRootCmd(os.Args[0], false, cmd)
return rootCmd.Execute() return rootCmd.Execute()
} }
@@ -59,7 +62,7 @@ func flushMetrics(cmd *command.DockerCli) {
func runPlugin(cmd *command.DockerCli) error { func runPlugin(cmd *command.DockerCli) error {
rootCmd := commands.NewRootCmd("buildx", true, cmd) rootCmd := commands.NewRootCmd("buildx", true, cmd)
return plugin.RunPlugin(cmd, rootCmd, metadata.Metadata{ return plugin.RunPlugin(cmd, rootCmd, manager.Metadata{
SchemaVersion: "0.1.0", SchemaVersion: "0.1.0",
Vendor: "Docker Inc.", Vendor: "Docker Inc.",
Version: version.Version, Version: version.Version,

View File

@@ -66,11 +66,7 @@ type bakeOptions struct {
func runBake(ctx context.Context, dockerCli command.Cli, targets []string, in bakeOptions, cFlags commonFlags) (err error) { func runBake(ctx context.Context, dockerCli command.Cli, targets []string, in bakeOptions, cFlags commonFlags) (err error) {
mp := dockerCli.MeterProvider() mp := dockerCli.MeterProvider()
ctx, end, err := tracing.TraceCurrentCommand(ctx, append([]string{"bake"}, targets...), ctx, end, err := tracing.TraceCurrentCommand(ctx, "bake")
attribute.String("builder", in.builder),
attribute.StringSlice("targets", targets),
attribute.StringSlice("files", in.files),
)
if err != nil { if err != nil {
return err return err
} }
@@ -275,10 +271,8 @@ func runBake(ctx context.Context, dockerCli command.Cli, targets []string, in ba
if err != nil { if err != nil {
return err return err
} }
if progressMode != progressui.RawJSONMode { if err := exp.Prompt(ctx, url != "", &syncWriter{w: dockerCli.Err(), wait: printer.Wait}); err != nil {
if err := exp.Prompt(ctx, url != "", &syncWriter{w: dockerCli.Err(), wait: printer.Wait}); err != nil { return err
return err
}
} }
if printer.IsDone() { if printer.IsDone() {
// init new printer as old one was stopped to show the prompt // init new printer as old one was stopped to show the prompt
@@ -287,7 +281,7 @@ func runBake(ctx context.Context, dockerCli command.Cli, targets []string, in ba
} }
} }
if err := saveLocalStateGroup(dockerCli, in, targets, bo); err != nil { if err := saveLocalStateGroup(dockerCli, in, targets, bo, overrides, def); err != nil {
return err return err
} }
@@ -309,7 +303,7 @@ func runBake(ctx context.Context, dockerCli command.Cli, targets []string, in ba
desktop.PrintBuildDetails(os.Stderr, printer.BuildRefs(), term) desktop.PrintBuildDetails(os.Stderr, printer.BuildRefs(), term)
} }
if len(in.metadataFile) > 0 { if len(in.metadataFile) > 0 {
dt := make(map[string]any) dt := make(map[string]interface{})
for t, r := range resp { for t, r := range resp {
dt[t] = decodeExporterResponse(r.ExporterResponse) dt[t] = decodeExporterResponse(r.ExporterResponse)
} }
@@ -492,14 +486,7 @@ func bakeCmd(dockerCli command.Cli, rootOpts *rootOptions) *cobra.Command {
return cmd return cmd
} }
func saveLocalStateGroup(dockerCli command.Cli, in bakeOptions, targets []string, bo map[string]build.Options) error { func saveLocalStateGroup(dockerCli command.Cli, in bakeOptions, targets []string, bo map[string]build.Options, overrides []string, def any) error {
l, err := localstate.New(confutil.NewConfig(dockerCli))
if err != nil {
return err
}
defer l.MigrateIfNeeded()
prm := confutil.MetadataProvenance() prm := confutil.MetadataProvenance()
if len(in.metadataFile) == 0 { if len(in.metadataFile) == 0 {
prm = confutil.MetadataProvenanceModeDisabled prm = confutil.MetadataProvenanceModeDisabled
@@ -519,10 +506,19 @@ func saveLocalStateGroup(dockerCli command.Cli, in bakeOptions, targets []string
if len(refs) == 0 { if len(refs) == 0 {
return nil return nil
} }
l, err := localstate.New(confutil.NewConfig(dockerCli))
if err != nil {
return err
}
dtdef, err := json.MarshalIndent(def, "", " ")
if err != nil {
return err
}
return l.SaveGroup(groupRef, localstate.StateGroup{ return l.SaveGroup(groupRef, localstate.StateGroup{
Refs: refs, Definition: dtdef,
Targets: targets, Targets: targets,
Inputs: overrides,
Refs: refs,
}) })
} }

View File

@@ -11,7 +11,6 @@ import (
"io" "io"
"os" "os"
"path/filepath" "path/filepath"
"slices"
"strconv" "strconv"
"strings" "strings"
"sync" "sync"
@@ -42,7 +41,7 @@ import (
"github.com/docker/cli/cli/command" "github.com/docker/cli/cli/command"
dockeropts "github.com/docker/cli/opts" dockeropts "github.com/docker/cli/opts"
"github.com/docker/docker/api/types/versions" "github.com/docker/docker/api/types/versions"
"github.com/docker/docker/pkg/atomicwriter" "github.com/docker/docker/pkg/ioutils"
"github.com/moby/buildkit/client" "github.com/moby/buildkit/client"
"github.com/moby/buildkit/exporter/containerimage/exptypes" "github.com/moby/buildkit/exporter/containerimage/exptypes"
"github.com/moby/buildkit/frontend/subrequests" "github.com/moby/buildkit/frontend/subrequests"
@@ -157,7 +156,7 @@ func (o *buildOptions) toControllerOptions() (*controllerapi.BuildOptions, error
return nil, err return nil, err
} }
inAttests := slices.Clone(o.attests) inAttests := append([]string{}, o.attests...)
if o.provenance != "" { if o.provenance != "" {
inAttests = append(inAttests, buildflags.CanonicalizeAttest("provenance", o.provenance)) inAttests = append(inAttests, buildflags.CanonicalizeAttest("provenance", o.provenance))
} }
@@ -286,11 +285,7 @@ func (o *buildOptionsHash) String() string {
func runBuild(ctx context.Context, dockerCli command.Cli, options buildOptions) (err error) { func runBuild(ctx context.Context, dockerCli command.Cli, options buildOptions) (err error) {
mp := dockerCli.MeterProvider() mp := dockerCli.MeterProvider()
ctx, end, err := tracing.TraceCurrentCommand(ctx, []string{"build", options.contextPath}, ctx, end, err := tracing.TraceCurrentCommand(ctx, "build")
attribute.String("builder", options.builder),
attribute.String("context", options.contextPath),
attribute.String("dockerfile", options.dockerfileName),
)
if err != nil { if err != nil {
return err return err
} }
@@ -471,7 +466,7 @@ func runControllerBuild(ctx context.Context, dockerCli command.Cli, opts *contro
if err != nil { if err != nil {
var be *controllererrors.BuildError var be *controllererrors.BuildError
if errors.As(err, &be) { if errors.As(err, &be) {
ref = be.SessionID ref = be.Ref
retErr = err retErr = err
// We can proceed to monitor // We can proceed to monitor
} else { } else {
@@ -598,7 +593,7 @@ func buildCmd(dockerCli command.Cli, rootOpts *rootOptions, debugConfig *debug.D
flags.StringSliceVar(&options.extraHosts, "add-host", []string{}, `Add a custom host-to-IP mapping (format: "host:ip")`) flags.StringSliceVar(&options.extraHosts, "add-host", []string{}, `Add a custom host-to-IP mapping (format: "host:ip")`)
flags.StringArrayVar(&options.allow, "allow", []string{}, `Allow extra privileged entitlement (e.g., "network.host", "security.insecure")`) flags.StringSliceVar(&options.allow, "allow", []string{}, `Allow extra privileged entitlement (e.g., "network.host", "security.insecure")`)
flags.StringArrayVarP(&options.annotations, "annotation", "", []string{}, "Add annotation to the image") flags.StringArrayVarP(&options.annotations, "annotation", "", []string{}, "Add annotation to the image")
@@ -745,15 +740,15 @@ func checkWarnedFlags(f *pflag.Flag) {
} }
} }
func writeMetadataFile(filename string, dt any) error { func writeMetadataFile(filename string, dt interface{}) error {
b, err := json.MarshalIndent(dt, "", " ") b, err := json.MarshalIndent(dt, "", " ")
if err != nil { if err != nil {
return err return err
} }
return atomicwriter.WriteFile(filename, b, 0644) return ioutils.AtomicWriteFile(filename, b, 0644)
} }
func decodeExporterResponse(exporterResponse map[string]string) map[string]any { func decodeExporterResponse(exporterResponse map[string]string) map[string]interface{} {
decFunc := func(k, v string) ([]byte, error) { decFunc := func(k, v string) ([]byte, error) {
if k == "result.json" { if k == "result.json" {
// result.json is part of metadata response for subrequests which // result.json is part of metadata response for subrequests which
@@ -762,16 +757,16 @@ func decodeExporterResponse(exporterResponse map[string]string) map[string]any {
} }
return base64.StdEncoding.DecodeString(v) return base64.StdEncoding.DecodeString(v)
} }
out := make(map[string]any) out := make(map[string]interface{})
for k, v := range exporterResponse { for k, v := range exporterResponse {
dt, err := decFunc(k, v) dt, err := decFunc(k, v)
if err != nil { if err != nil {
out[k] = v out[k] = v
continue continue
} }
var raw map[string]any var raw map[string]interface{}
if err = json.Unmarshal(dt, &raw); err != nil || len(raw) == 0 { if err = json.Unmarshal(dt, &raw); err != nil || len(raw) == 0 {
var rawList []map[string]any var rawList []map[string]interface{}
if err = json.Unmarshal(dt, &rawList); err != nil || len(rawList) == 0 { if err = json.Unmarshal(dt, &rawList); err != nil || len(rawList) == 0 {
out[k] = v out[k] = v
continue continue

View File

@@ -124,7 +124,7 @@ func duCmd(dockerCli command.Cli, rootOpts *rootOptions) *cobra.Command {
return cmd return cmd
} }
func printKV(w io.Writer, k string, v any) { func printKV(w io.Writer, k string, v interface{}) {
fmt.Fprintf(w, "%s:\t%v\n", k, v) fmt.Fprintf(w, "%s:\t%v\n", k, v)
} }

View File

@@ -1,160 +0,0 @@
package history
import (
"context"
"io"
"os"
"slices"
"github.com/containerd/console"
"github.com/containerd/platforms"
"github.com/docker/buildx/builder"
"github.com/docker/buildx/localstate"
"github.com/docker/buildx/util/cobrautil/completion"
"github.com/docker/buildx/util/confutil"
"github.com/docker/buildx/util/desktop/bundle"
"github.com/docker/cli/cli/command"
"github.com/moby/buildkit/client"
"github.com/pkg/errors"
"github.com/spf13/cobra"
)
// exportOptions holds the flag and argument values for the
// "history export" command.
type exportOptions struct {
	builder string   // name of the builder whose records are exported
	refs    []string // build record refs to export; empty selects the latest record
	output  string   // output file path; empty writes to stdout
	all     bool     // export every completed record for the builder
}
// runExport exports one or more build history records as a Docker Desktop
// bundle and writes the result to opts.output, or to stdout when no output
// file is given. For each requested ref only completed records are
// considered; with an empty ref the newest record is chosen (or all records
// when opts.all is set). Writing raw bundle bytes to an interactive console
// is refused unless --output is provided.
func runExport(ctx context.Context, dockerCli command.Cli, opts exportOptions) error {
	b, err := builder.New(dockerCli, builder.WithName(opts.builder))
	if err != nil {
		return err
	}

	nodes, err := b.LoadNodes(ctx, builder.WithData())
	if err != nil {
		return err
	}
	for _, node := range nodes {
		if node.Err != nil {
			return node.Err
		}
	}

	// An empty ref queries all records so the latest can be picked below.
	if len(opts.refs) == 0 {
		opts.refs = []string{""}
	}

	var res []historyRecord
	for _, ref := range opts.refs {
		recs, err := queryRecords(ctx, ref, nodes, &queryOptions{
			CompletedOnly: true,
		})
		if err != nil {
			return err
		}
		if len(recs) == 0 {
			if ref == "" {
				return errors.New("no records found")
			}
			return errors.Errorf("no record found for ref %q", ref)
		}
		if ref == "" {
			// No explicit ref: sort newest-first so recs[0] is the latest.
			slices.SortFunc(recs, func(a, b historyRecord) int {
				return b.CreatedAt.AsTime().Compare(a.CreatedAt.AsTime())
			})
		}
		if opts.all {
			res = append(res, recs...)
			break
		} else {
			res = append(res, recs[0])
		}
	}

	ls, err := localstate.New(confutil.NewConfig(dockerCli))
	if err != nil {
		return err
	}

	// Collect one client per distinct builder node backing the selected
	// records.
	visited := map[*builder.Node]struct{}{}
	var clients []*client.Client
	for _, rec := range res {
		if _, ok := visited[rec.node]; ok {
			continue
		}
		// Fix: the original never populated the visited set, so duplicate
		// nodes were never skipped and produced duplicate clients.
		visited[rec.node] = struct{}{}
		c, err := rec.node.Driver.Client(ctx)
		if err != nil {
			return err
		}
		clients = append(clients, c)
	}

	toExport := make([]*bundle.Record, 0, len(res))
	for _, rec := range res {
		var defaultPlatform string
		if p := rec.node.Platforms; len(p) > 0 {
			defaultPlatform = platforms.FormatAll(platforms.Normalize(p[0]))
		}

		// Attach the local state and its group, when present, so the bundle
		// carries the original build inputs.
		var stg *localstate.StateGroup
		st, _ := ls.ReadRef(rec.node.Builder, rec.node.Name, rec.Ref)
		if st != nil && st.GroupRef != "" {
			stg, err = ls.ReadGroup(st.GroupRef)
			if err != nil {
				return err
			}
		}

		toExport = append(toExport, &bundle.Record{
			BuildHistoryRecord: rec.BuildHistoryRecord,
			DefaultPlatform:    defaultPlatform,
			LocalState:         st,
			StateGroup:         stg,
		})
	}

	var w io.Writer = os.Stdout
	if opts.output != "" {
		f, err := os.Create(opts.output)
		if err != nil {
			return errors.Wrapf(err, "failed to create output file %q", opts.output)
		}
		defer f.Close()
		w = f
	} else {
		// Refuse to dump binary bundle data onto an interactive console.
		if _, err := console.ConsoleFromFile(os.Stdout); err == nil {
			return errors.Errorf("refusing to write to console, use --output to specify a file")
		}
	}

	return bundle.Export(ctx, clients, w, toExport)
}
// exportCmd builds the "history export" cobra command, wiring its flags
// into an exportOptions value and delegating execution to runExport.
func exportCmd(dockerCli command.Cli, rootOpts RootOptions) *cobra.Command {
	var opts exportOptions

	cmd := &cobra.Command{
		Use:   "export [OPTIONS] [REF]",
		Short: "Export a build into Docker Desktop bundle",
		RunE: func(cmd *cobra.Command, args []string) error {
			// Explicit refs and --all are mutually exclusive.
			if opts.all && len(args) > 0 {
				return errors.New("cannot specify refs when using --all")
			}
			opts.refs = args
			opts.builder = *rootOpts.Builder
			return runExport(cmd.Context(), dockerCli, opts)
		},
		ValidArgsFunction: completion.Disable,
	}

	f := cmd.Flags()
	f.StringVarP(&opts.output, "output", "o", "", "Output file path")
	f.BoolVar(&opts.all, "all", false, "Export all records for the builder")

	return cmd
}

View File

@@ -1,135 +0,0 @@
package history
import (
"context"
"encoding/json"
"fmt"
"io"
"net"
"net/http"
"os"
"strings"
remoteutil "github.com/docker/buildx/driver/remote/util"
"github.com/docker/buildx/util/cobrautil/completion"
"github.com/docker/buildx/util/desktop"
"github.com/docker/cli/cli/command"
"github.com/pkg/browser"
"github.com/pkg/errors"
"github.com/spf13/cobra"
)
// importOptions holds the flag values for the "history import" command.
type importOptions struct {
	file []string // bundle file paths to import; "-" reads stdin, empty list also reads stdin
}
// runImport uploads one or more build bundles to Docker Desktop over its
// build server socket. Bundles are read from the files in opts.file ("-"
// means stdin) or from stdin when no files are given. Each imported
// record's URL is printed to stderr and the first one is opened in the
// browser.
func runImport(ctx context.Context, dockerCli command.Cli, opts importOptions) error {
	sock, err := desktop.BuildServerAddr()
	if err != nil {
		return err
	}

	// Route every request through the Desktop build server socket instead
	// of a real TCP connection; the request URL host is only a placeholder.
	tr := http.DefaultTransport.(*http.Transport).Clone()
	tr.DialContext = func(ctx context.Context, _, _ string) (net.Conn, error) {
		network, addr, ok := strings.Cut(sock, "://")
		if !ok {
			return nil, errors.Errorf("invalid endpoint address: %s", sock)
		}
		return remoteutil.DialContext(ctx, network, addr)
	}
	client := &http.Client{
		Transport: tr,
	}

	var urls []string
	if len(opts.file) == 0 {
		u, err := importFrom(ctx, client, os.Stdin)
		if err != nil {
			return err
		}
		urls = append(urls, u...)
	} else {
		// importFile handles a single path so that defer closes each opened
		// file promptly — including on the error path, where the original
		// code returned before calling Close and leaked the handle.
		importFile := func(fn string) error {
			var rdr io.Reader = os.Stdin
			if fn != "-" {
				f, err := os.Open(fn)
				if err != nil {
					return errors.Wrapf(err, "failed to open file %s", fn)
				}
				defer f.Close()
				rdr = f
			}
			u, err := importFrom(ctx, client, rdr)
			if err != nil {
				return err
			}
			urls = append(urls, u...)
			return nil
		}
		for _, fn := range opts.file {
			if err := importFile(fn); err != nil {
				return err
			}
		}
	}

	if len(urls) == 0 {
		return errors.New("no build records found in the bundle")
	}

	for i, url := range urls {
		fmt.Fprintln(dockerCli.Err(), url)
		if i == 0 {
			// Only the first record is opened in the browser; its error, if
			// any, is returned after all URLs have been printed.
			err = browser.OpenURL(url)
		}
	}
	return err
}
// importFrom POSTs the bundle data read from rdr to the Docker Desktop
// upload endpoint via c and returns one Desktop build URL per imported
// record ref decoded from the response.
func importFrom(ctx context.Context, c *http.Client, rdr io.Reader) ([]string, error) {
	req, err := http.NewRequestWithContext(ctx, http.MethodPost, "http://docker-desktop/upload", rdr)
	if err != nil {
		return nil, errors.Wrap(err, "failed to create request")
	}

	resp, err := c.Do(req)
	if err != nil {
		return nil, errors.Wrap(err, "failed to send request, check if Docker Desktop is running")
	}
	defer resp.Body.Close()

	if resp.StatusCode != http.StatusOK {
		// Surface the server's error body to the user.
		body, _ := io.ReadAll(resp.Body)
		return nil, errors.Errorf("failed to import build: %s", string(body))
	}

	var refs []string
	dec := json.NewDecoder(resp.Body)
	if err := dec.Decode(&refs); err != nil {
		return nil, errors.Wrap(err, "failed to decode response")
	}

	urls := make([]string, 0, len(refs))
	for _, ref := range refs {
		urls = append(urls, desktop.BuildURL(fmt.Sprintf(".imported/_/%s", ref)))
	}
	// Fix: the original ended with `return urls, err`, silently returning a
	// stale outer err variable; return nil explicitly on success.
	return urls, nil
}
// importCmd builds the "history import" cobra command, binding its flags
// to an importOptions value and delegating execution to runImport.
func importCmd(dockerCli command.Cli, _ RootOptions) *cobra.Command {
	var opts importOptions

	cmd := &cobra.Command{
		Use:   "import [OPTIONS] < bundle.dockerbuild",
		Short: "Import a build into Docker Desktop",
		Args:  cobra.NoArgs,
		RunE: func(cmd *cobra.Command, args []string) error {
			return runImport(cmd.Context(), dockerCli, opts)
		},
		ValidArgsFunction: completion.Disable,
	}

	f := cmd.Flags()
	f.StringArrayVarP(&opts.file, "file", "f", nil, "Import from a file path")

	return cmd
}

View File

@@ -13,7 +13,6 @@ import (
"strconv" "strconv"
"strings" "strings"
"text/tabwriter" "text/tabwriter"
"text/template"
"time" "time"
"github.com/containerd/containerd/v2/core/content" "github.com/containerd/containerd/v2/core/content"
@@ -26,7 +25,6 @@ import (
"github.com/docker/buildx/util/confutil" "github.com/docker/buildx/util/confutil"
"github.com/docker/buildx/util/desktop" "github.com/docker/buildx/util/desktop"
"github.com/docker/cli/cli/command" "github.com/docker/cli/cli/command"
"github.com/docker/cli/cli/command/formatter"
"github.com/docker/cli/cli/debug" "github.com/docker/cli/cli/debug"
slsa "github.com/in-toto/in-toto-golang/in_toto/slsa_provenance/common" slsa "github.com/in-toto/in-toto-golang/in_toto/slsa_provenance/common"
slsa02 "github.com/in-toto/in-toto-golang/in_toto/slsa_provenance/v0.2" slsa02 "github.com/in-toto/in-toto-golang/in_toto/slsa_provenance/v0.2"
@@ -47,114 +45,9 @@ import (
proto "google.golang.org/protobuf/proto" proto "google.golang.org/protobuf/proto"
) )
type statusT string
const (
statusComplete statusT = "completed"
statusRunning statusT = "running"
statusError statusT = "failed"
statusCanceled statusT = "canceled"
)
type inspectOptions struct { type inspectOptions struct {
builder string builder string
ref string ref string
format string
}
type inspectOutput struct {
Name string `json:",omitempty"`
Ref string
Context string `json:",omitempty"`
Dockerfile string `json:",omitempty"`
VCSRepository string `json:",omitempty"`
VCSRevision string `json:",omitempty"`
Target string `json:",omitempty"`
Platform []string `json:",omitempty"`
KeepGitDir bool `json:",omitempty"`
NamedContexts []keyValueOutput `json:",omitempty"`
StartedAt *time.Time `json:",omitempty"`
CompletedAt *time.Time `json:",omitempty"`
Duration time.Duration `json:",omitempty"`
Status statusT `json:",omitempty"`
Error *errorOutput `json:",omitempty"`
NumCompletedSteps int32
NumTotalSteps int32
NumCachedSteps int32
BuildArgs []keyValueOutput `json:",omitempty"`
Labels []keyValueOutput `json:",omitempty"`
Config configOutput `json:",omitempty"`
Materials []materialOutput `json:",omitempty"`
Attachments []attachmentOutput `json:",omitempty"`
Errors []string `json:",omitempty"`
}
type configOutput struct {
Network string `json:",omitempty"`
ExtraHosts []string `json:",omitempty"`
Hostname string `json:",omitempty"`
CgroupParent string `json:",omitempty"`
ImageResolveMode string `json:",omitempty"`
MultiPlatform bool `json:",omitempty"`
NoCache bool `json:",omitempty"`
NoCacheFilter []string `json:",omitempty"`
ShmSize string `json:",omitempty"`
Ulimit string `json:",omitempty"`
CacheMountNS string `json:",omitempty"`
DockerfileCheckConfig string `json:",omitempty"`
SourceDateEpoch string `json:",omitempty"`
SandboxHostname string `json:",omitempty"`
RestRaw []keyValueOutput `json:",omitempty"`
}
type materialOutput struct {
URI string `json:",omitempty"`
Digests []string `json:",omitempty"`
}
type attachmentOutput struct {
Digest string `json:",omitempty"`
Platform string `json:",omitempty"`
Type string `json:",omitempty"`
}
type errorOutput struct {
Code int `json:",omitempty"`
Message string `json:",omitempty"`
Name string `json:",omitempty"`
Logs []string `json:",omitempty"`
Sources []byte `json:",omitempty"`
Stack []byte `json:",omitempty"`
}
type keyValueOutput struct {
Name string `json:",omitempty"`
Value string `json:",omitempty"`
}
func readAttr[T any](attrs map[string]string, k string, dest *T, f func(v string) (T, bool)) {
if sv, ok := attrs[k]; ok {
if f != nil {
v, ok := f(sv)
if ok {
*dest = v
}
}
if d, ok := any(dest).(*string); ok {
*d = sv
}
}
delete(attrs, k)
} }
func runInspect(ctx context.Context, dockerCli command.Cli, opts inspectOptions) error { func runInspect(ctx context.Context, dockerCli command.Cli, opts inspectOptions) error {
@@ -173,7 +66,7 @@ func runInspect(ctx context.Context, dockerCli command.Cli, opts inspectOptions)
} }
} }
recs, err := queryRecords(ctx, opts.ref, nodes, nil) recs, err := queryRecords(ctx, opts.ref, nodes)
if err != nil { if err != nil {
return err return err
} }
@@ -185,26 +78,13 @@ func runInspect(ctx context.Context, dockerCli command.Cli, opts inspectOptions)
return errors.Errorf("no record found for ref %q", opts.ref) return errors.Errorf("no record found for ref %q", opts.ref)
} }
if opts.ref == "" {
slices.SortFunc(recs, func(a, b historyRecord) int {
return b.CreatedAt.AsTime().Compare(a.CreatedAt.AsTime())
})
}
rec := &recs[0] rec := &recs[0]
c, err := rec.node.Driver.Client(ctx)
if err != nil {
return err
}
store := proxy.NewContentStore(c.ContentClient())
var defaultPlatform string
workers, err := c.ListWorkers(ctx)
if err != nil {
return errors.Wrap(err, "failed to list workers")
}
workers0:
for _, w := range workers {
for _, p := range w.Platforms {
defaultPlatform = platforms.FormatAll(platforms.Normalize(p))
break workers0
}
}
ls, err := localstate.New(confutil.NewConfig(dockerCli)) ls, err := localstate.New(confutil.NewConfig(dockerCli))
if err != nil { if err != nil {
@@ -212,10 +92,22 @@ workers0:
} }
st, _ := ls.ReadRef(rec.node.Builder, rec.node.Name, rec.Ref) st, _ := ls.ReadRef(rec.node.Builder, rec.node.Name, rec.Ref)
tw := tabwriter.NewWriter(dockerCli.Out(), 1, 8, 1, '\t', 0)
attrs := rec.FrontendAttrs attrs := rec.FrontendAttrs
delete(attrs, "frontend.caps") delete(attrs, "frontend.caps")
var out inspectOutput writeAttr := func(k, name string, f func(v string) (string, bool)) {
if v, ok := attrs[k]; ok {
if f != nil {
v, ok = f(v)
}
if ok {
fmt.Fprintf(tw, "%s:\t%s\n", name, v)
}
}
delete(attrs, k)
}
var context string var context string
var dockerfile string var dockerfile string
@@ -254,171 +146,131 @@ workers0:
} }
delete(attrs, "filename") delete(attrs, "filename")
out.Name = buildName(rec.FrontendAttrs, st) if context != "" {
out.Ref = rec.Ref fmt.Fprintf(tw, "Context:\t%s\n", context)
}
out.Context = context if dockerfile != "" {
out.Dockerfile = dockerfile fmt.Fprintf(tw, "Dockerfile:\t%s\n", dockerfile)
}
if _, ok := attrs["context"]; !ok { if _, ok := attrs["context"]; !ok {
if src, ok := attrs["vcs:source"]; ok { if src, ok := attrs["vcs:source"]; ok {
out.VCSRepository = src fmt.Fprintf(tw, "VCS Repository:\t%s\n", src)
} }
if rev, ok := attrs["vcs:revision"]; ok { if rev, ok := attrs["vcs:revision"]; ok {
out.VCSRevision = rev fmt.Fprintf(tw, "VCS Revision:\t%s\n", rev)
} }
} }
readAttr(attrs, "target", &out.Target, nil) writeAttr("target", "Target", nil)
writeAttr("platform", "Platform", func(v string) (string, bool) {
readAttr(attrs, "platform", &out.Platform, func(v string) ([]string, bool) { return tryParseValue(v, func(v string) (string, error) {
return tryParseValue(v, &out.Errors, func(v string) ([]string, error) {
var pp []string var pp []string
for _, v := range strings.Split(v, ",") { for _, v := range strings.Split(v, ",") {
p, err := platforms.Parse(v) p, err := platforms.Parse(v)
if err != nil { if err != nil {
return nil, err return "", err
} }
pp = append(pp, platforms.FormatAll(platforms.Normalize(p))) pp = append(pp, platforms.FormatAll(platforms.Normalize(p)))
} }
if len(pp) == 0 { return strings.Join(pp, ", "), nil
pp = append(pp, defaultPlatform) }), true
}
return pp, nil
})
}) })
writeAttr("build-arg:BUILDKIT_CONTEXT_KEEP_GIT_DIR", "Keep Git Dir", func(v string) (string, bool) {
readAttr(attrs, "build-arg:BUILDKIT_CONTEXT_KEEP_GIT_DIR", &out.KeepGitDir, func(v string) (bool, bool) { return tryParseValue(v, func(v string) (string, error) {
return tryParseValue(v, &out.Errors, strconv.ParseBool) b, err := strconv.ParseBool(v)
})
out.NamedContexts = readKeyValues(attrs, "context:")
if rec.CreatedAt != nil {
tm := rec.CreatedAt.AsTime().Local()
out.StartedAt = &tm
}
out.Status = statusRunning
if rec.CompletedAt != nil {
tm := rec.CompletedAt.AsTime().Local()
out.CompletedAt = &tm
out.Status = statusComplete
}
if rec.Error != nil || rec.ExternalError != nil {
out.Error = &errorOutput{}
if rec.Error != nil {
if codes.Code(rec.Error.Code) == codes.Canceled {
out.Status = statusCanceled
} else {
out.Status = statusError
}
out.Error.Code = int(codes.Code(rec.Error.Code))
out.Error.Message = rec.Error.Message
}
if rec.ExternalError != nil {
dt, err := content.ReadBlob(ctx, store, ociDesc(rec.ExternalError))
if err != nil { if err != nil {
return errors.Wrapf(err, "failed to read external error %s", rec.ExternalError.Digest) return "", err
} }
var st spb.Status return strconv.FormatBool(b), nil
if err := proto.Unmarshal(dt, &st); err != nil { }), true
return errors.Wrapf(err, "failed to unmarshal external error %s", rec.ExternalError.Digest) })
}
retErr := grpcerrors.FromGRPC(status.ErrorProto(&st))
var errsources bytes.Buffer
for _, s := range errdefs.Sources(retErr) {
s.Print(&errsources)
errsources.WriteString("\n")
}
out.Error.Sources = errsources.Bytes()
var ve *errdefs.VertexError
if errors.As(retErr, &ve) {
dgst, err := digest.Parse(ve.Vertex.Digest)
if err != nil {
return errors.Wrapf(err, "failed to parse vertex digest %s", ve.Vertex.Digest)
}
name, logs, err := loadVertexLogs(ctx, c, rec.Ref, dgst, 16)
if err != nil {
return errors.Wrapf(err, "failed to load vertex logs %s", dgst)
}
out.Error.Name = name
out.Error.Logs = logs
}
out.Error.Stack = fmt.Appendf(nil, "%+v", stack.Formatter(retErr))
}
}
if out.StartedAt != nil { tw.Flush()
if out.CompletedAt != nil {
out.Duration = out.CompletedAt.Sub(*out.StartedAt) fmt.Fprintln(dockerCli.Out())
printTable(dockerCli.Out(), attrs, "context:", "Named Context")
tw = tabwriter.NewWriter(dockerCli.Out(), 1, 8, 1, '\t', 0)
fmt.Fprintf(tw, "Started:\t%s\n", rec.CreatedAt.AsTime().Local().Format("2006-01-02 15:04:05"))
var duration time.Duration
var statusStr string
if rec.CompletedAt != nil {
duration = rec.CompletedAt.AsTime().Sub(rec.CreatedAt.AsTime())
} else {
duration = rec.currentTimestamp.Sub(rec.CreatedAt.AsTime())
statusStr = " (running)"
}
fmt.Fprintf(tw, "Duration:\t%s%s\n", formatDuration(duration), statusStr)
if rec.Error != nil {
if codes.Code(rec.Error.Code) == codes.Canceled {
fmt.Fprintf(tw, "Status:\tCanceled\n")
} else { } else {
out.Duration = rec.currentTimestamp.Sub(*out.StartedAt) fmt.Fprintf(tw, "Error:\t%s %s\n", codes.Code(rec.Error.Code).String(), rec.Error.Message)
} }
} }
fmt.Fprintf(tw, "Build Steps:\t%d/%d (%.0f%% cached)\n", rec.NumCompletedSteps, rec.NumTotalSteps, float64(rec.NumCachedSteps)/float64(rec.NumTotalSteps)*100)
tw.Flush()
out.NumCompletedSteps = rec.NumCompletedSteps fmt.Fprintln(dockerCli.Out())
out.NumTotalSteps = rec.NumTotalSteps
out.NumCachedSteps = rec.NumCachedSteps
out.BuildArgs = readKeyValues(attrs, "build-arg:") tw = tabwriter.NewWriter(dockerCli.Out(), 1, 8, 1, '\t', 0)
out.Labels = readKeyValues(attrs, "label:")
readAttr(attrs, "force-network-mode", &out.Config.Network, nil) writeAttr("force-network-mode", "Network", nil)
readAttr(attrs, "hostname", &out.Config.Hostname, nil) writeAttr("hostname", "Hostname", nil)
readAttr(attrs, "cgroup-parent", &out.Config.CgroupParent, nil) writeAttr("add-hosts", "Extra Hosts", func(v string) (string, bool) {
readAttr(attrs, "image-resolve-mode", &out.Config.ImageResolveMode, nil) return tryParseValue(v, func(v string) (string, error) {
readAttr(attrs, "build-arg:BUILDKIT_MULTI_PLATFORM", &out.Config.MultiPlatform, func(v string) (bool, bool) {
return tryParseValue(v, &out.Errors, strconv.ParseBool)
})
readAttr(attrs, "multi-platform", &out.Config.MultiPlatform, func(v string) (bool, bool) {
return tryParseValue(v, &out.Errors, strconv.ParseBool)
})
readAttr(attrs, "no-cache", &out.Config.NoCache, func(v string) (bool, bool) {
if v == "" {
return true, true
}
return false, false
})
readAttr(attrs, "no-cache", &out.Config.NoCacheFilter, func(v string) ([]string, bool) {
if v == "" {
return nil, false
}
return strings.Split(v, ","), true
})
readAttr(attrs, "add-hosts", &out.Config.ExtraHosts, func(v string) ([]string, bool) {
return tryParseValue(v, &out.Errors, func(v string) ([]string, error) {
fields, err := csvvalue.Fields(v, nil) fields, err := csvvalue.Fields(v, nil)
if err != nil { if err != nil {
return nil, err return "", err
} }
return fields, nil return strings.Join(fields, ", "), nil
}) }), true
}) })
writeAttr("cgroup-parent", "Cgroup Parent", nil)
writeAttr("image-resolve-mode", "Image Resolve Mode", nil)
writeAttr("multi-platform", "Force Multi-Platform", nil)
writeAttr("build-arg:BUILDKIT_MULTI_PLATFORM", "Force Multi-Platform", nil)
writeAttr("no-cache", "Disable Cache", func(v string) (string, bool) {
if v == "" {
return "true", true
}
return v, true
})
writeAttr("shm-size", "Shm Size", nil)
writeAttr("ulimit", "Resource Limits", nil)
writeAttr("build-arg:BUILDKIT_CACHE_MOUNT_NS", "Cache Mount Namespace", nil)
writeAttr("build-arg:BUILDKIT_DOCKERFILE_CHECK", "Dockerfile Check Config", nil)
writeAttr("build-arg:SOURCE_DATE_EPOCH", "Source Date Epoch", nil)
writeAttr("build-arg:SANDBOX_HOSTNAME", "Sandbox Hostname", nil)
readAttr(attrs, "shm-size", &out.Config.ShmSize, nil) var unusedAttrs []string
readAttr(attrs, "ulimit", &out.Config.Ulimit, nil)
readAttr(attrs, "build-arg:BUILDKIT_CACHE_MOUNT_NS", &out.Config.CacheMountNS, nil)
readAttr(attrs, "build-arg:BUILDKIT_DOCKERFILE_CHECK", &out.Config.DockerfileCheckConfig, nil)
readAttr(attrs, "build-arg:SOURCE_DATE_EPOCH", &out.Config.SourceDateEpoch, nil)
readAttr(attrs, "build-arg:SANDBOX_HOSTNAME", &out.Config.SandboxHostname, nil)
var unusedAttrs []keyValueOutput
for k := range attrs { for k := range attrs {
if strings.HasPrefix(k, "vcs:") || strings.HasPrefix(k, "build-arg:") || strings.HasPrefix(k, "label:") || strings.HasPrefix(k, "context:") || strings.HasPrefix(k, "attest:") { if strings.HasPrefix(k, "vcs:") || strings.HasPrefix(k, "build-arg:") || strings.HasPrefix(k, "label:") || strings.HasPrefix(k, "context:") || strings.HasPrefix(k, "attest:") {
continue continue
} }
unusedAttrs = append(unusedAttrs, keyValueOutput{ unusedAttrs = append(unusedAttrs, k)
Name: k,
Value: attrs[k],
})
} }
slices.SortFunc(unusedAttrs, func(a, b keyValueOutput) int { slices.Sort(unusedAttrs)
return cmp.Compare(a.Name, b.Name)
}) for _, k := range unusedAttrs {
out.Config.RestRaw = unusedAttrs fmt.Fprintf(tw, "%s:\t%s\n", k, attrs[k])
}
tw.Flush()
fmt.Fprintln(dockerCli.Out())
printTable(dockerCli.Out(), attrs, "build-arg:", "Build Arg")
printTable(dockerCli.Out(), attrs, "label:", "Label")
c, err := rec.node.Driver.Client(ctx)
if err != nil {
return err
}
store := proxy.NewContentStore(c.ContentClient())
attachments, err := allAttachments(ctx, store, *rec) attachments, err := allAttachments(ctx, store, *rec)
if err != nil { if err != nil {
@@ -430,209 +282,81 @@ workers0:
}) })
if provIndex != -1 { if provIndex != -1 {
prov := attachments[provIndex] prov := attachments[provIndex]
dt, err := content.ReadBlob(ctx, store, prov.descr) dt, err := content.ReadBlob(ctx, store, prov.descr)
if err != nil { if err != nil {
return errors.Errorf("failed to read provenance %s: %v", prov.descr.Digest, err) return errors.Errorf("failed to read provenance %s: %v", prov.descr.Digest, err)
} }
var pred provenancetypes.ProvenancePredicate var pred provenancetypes.ProvenancePredicate
if err := json.Unmarshal(dt, &pred); err != nil { if err := json.Unmarshal(dt, &pred); err != nil {
return errors.Errorf("failed to unmarshal provenance %s: %v", prov.descr.Digest, err) return errors.Errorf("failed to unmarshal provenance %s: %v", prov.descr.Digest, err)
} }
fmt.Fprintln(dockerCli.Out(), "Materials:")
tw = tabwriter.NewWriter(dockerCli.Out(), 1, 8, 1, '\t', 0)
fmt.Fprintf(tw, "URI\tDIGEST\n")
for _, m := range pred.Materials { for _, m := range pred.Materials {
out.Materials = append(out.Materials, materialOutput{ fmt.Fprintf(tw, "%s\t%s\n", m.URI, strings.Join(digestSetToDigests(m.Digest), ", "))
URI: m.URI,
Digests: digestSetToDigests(m.Digest),
})
} }
tw.Flush()
fmt.Fprintln(dockerCli.Out())
} }
if len(attachments) > 0 { if len(attachments) > 0 {
fmt.Fprintf(tw, "Attachments:\n")
tw = tabwriter.NewWriter(dockerCli.Out(), 1, 8, 1, '\t', 0)
fmt.Fprintf(tw, "DIGEST\tPLATFORM\tTYPE\n")
for _, a := range attachments { for _, a := range attachments {
p := "" p := ""
if a.platform != nil { if a.platform != nil {
p = platforms.FormatAll(*a.platform) p = platforms.FormatAll(*a.platform)
} }
out.Attachments = append(out.Attachments, attachmentOutput{ fmt.Fprintf(tw, "%s\t%s\t%s\n", a.descr.Digest, p, descrType(a.descr))
Digest: a.descr.Digest.String(),
Platform: p,
Type: descrType(a.descr),
})
} }
tw.Flush()
fmt.Fprintln(dockerCli.Out())
} }
if opts.format == formatter.JSONFormatKey { if rec.ExternalError != nil {
enc := json.NewEncoder(dockerCli.Out()) dt, err := content.ReadBlob(ctx, store, ociDesc(rec.ExternalError))
enc.SetIndent("", " ")
return enc.Encode(out)
} else if opts.format != formatter.PrettyFormatKey {
tmpl, err := template.New("inspect").Parse(opts.format)
if err != nil { if err != nil {
return errors.Wrapf(err, "failed to parse format template") return errors.Wrapf(err, "failed to read external error %s", rec.ExternalError.Digest)
} }
var buf bytes.Buffer var st spb.Status
if err := tmpl.Execute(&buf, out); err != nil { if err := proto.Unmarshal(dt, &st); err != nil {
return errors.Wrapf(err, "failed to execute format template") return errors.Wrapf(err, "failed to unmarshal external error %s", rec.ExternalError.Digest)
} }
fmt.Fprintln(dockerCli.Out(), buf.String()) retErr := grpcerrors.FromGRPC(status.ErrorProto(&st))
return nil for _, s := range errdefs.Sources(retErr) {
} s.Print(dockerCli.Out())
tw := tabwriter.NewWriter(dockerCli.Out(), 1, 8, 1, '\t', 0)
if out.Name != "" {
fmt.Fprintf(tw, "Name:\t%s\n", out.Name)
}
if opts.ref == "" && out.Ref != "" {
fmt.Fprintf(tw, "Ref:\t%s\n", out.Ref)
}
if out.Context != "" {
fmt.Fprintf(tw, "Context:\t%s\n", out.Context)
}
if out.Dockerfile != "" {
fmt.Fprintf(tw, "Dockerfile:\t%s\n", out.Dockerfile)
}
if out.VCSRepository != "" {
fmt.Fprintf(tw, "VCS Repository:\t%s\n", out.VCSRepository)
}
if out.VCSRevision != "" {
fmt.Fprintf(tw, "VCS Revision:\t%s\n", out.VCSRevision)
}
if out.Target != "" {
fmt.Fprintf(tw, "Target:\t%s\n", out.Target)
}
if len(out.Platform) > 0 {
fmt.Fprintf(tw, "Platforms:\t%s\n", strings.Join(out.Platform, ", "))
}
if out.KeepGitDir {
fmt.Fprintf(tw, "Keep Git Dir:\t%s\n", strconv.FormatBool(out.KeepGitDir))
}
tw.Flush()
fmt.Fprintln(dockerCli.Out())
printTable(dockerCli.Out(), out.NamedContexts, "Named Context")
tw = tabwriter.NewWriter(dockerCli.Out(), 1, 8, 1, '\t', 0)
fmt.Fprintf(tw, "Started:\t%s\n", out.StartedAt.Format("2006-01-02 15:04:05"))
var statusStr string
if out.Status == statusRunning {
statusStr = " (running)"
}
fmt.Fprintf(tw, "Duration:\t%s%s\n", formatDuration(out.Duration), statusStr)
if out.Status == statusError {
fmt.Fprintf(tw, "Error:\t%s %s\n", codes.Code(rec.Error.Code).String(), rec.Error.Message)
} else if out.Status == statusCanceled {
fmt.Fprintf(tw, "Status:\tCanceled\n")
}
fmt.Fprintf(tw, "Build Steps:\t%d/%d (%.0f%% cached)\n", out.NumCompletedSteps, out.NumTotalSteps, float64(out.NumCachedSteps)/float64(out.NumTotalSteps)*100)
tw.Flush()
fmt.Fprintln(dockerCli.Out())
tw = tabwriter.NewWriter(dockerCli.Out(), 1, 8, 1, '\t', 0)
if out.Config.Network != "" {
fmt.Fprintf(tw, "Network:\t%s\n", out.Config.Network)
}
if out.Config.Hostname != "" {
fmt.Fprintf(tw, "Hostname:\t%s\n", out.Config.Hostname)
}
if len(out.Config.ExtraHosts) > 0 {
fmt.Fprintf(tw, "Extra Hosts:\t%s\n", strings.Join(out.Config.ExtraHosts, ", "))
}
if out.Config.CgroupParent != "" {
fmt.Fprintf(tw, "Cgroup Parent:\t%s\n", out.Config.CgroupParent)
}
if out.Config.ImageResolveMode != "" {
fmt.Fprintf(tw, "Image Resolve Mode:\t%s\n", out.Config.ImageResolveMode)
}
if out.Config.MultiPlatform {
fmt.Fprintf(tw, "Multi-Platform:\t%s\n", strconv.FormatBool(out.Config.MultiPlatform))
}
if out.Config.NoCache {
fmt.Fprintf(tw, "No Cache:\t%s\n", strconv.FormatBool(out.Config.NoCache))
}
if len(out.Config.NoCacheFilter) > 0 {
fmt.Fprintf(tw, "No Cache Filter:\t%s\n", strings.Join(out.Config.NoCacheFilter, ", "))
}
if out.Config.ShmSize != "" {
fmt.Fprintf(tw, "Shm Size:\t%s\n", out.Config.ShmSize)
}
if out.Config.Ulimit != "" {
fmt.Fprintf(tw, "Resource Limits:\t%s\n", out.Config.Ulimit)
}
if out.Config.CacheMountNS != "" {
fmt.Fprintf(tw, "Cache Mount Namespace:\t%s\n", out.Config.CacheMountNS)
}
if out.Config.DockerfileCheckConfig != "" {
fmt.Fprintf(tw, "Dockerfile Check Config:\t%s\n", out.Config.DockerfileCheckConfig)
}
if out.Config.SourceDateEpoch != "" {
fmt.Fprintf(tw, "Source Date Epoch:\t%s\n", out.Config.SourceDateEpoch)
}
if out.Config.SandboxHostname != "" {
fmt.Fprintf(tw, "Sandbox Hostname:\t%s\n", out.Config.SandboxHostname)
}
for _, kv := range out.Config.RestRaw {
fmt.Fprintf(tw, "%s:\t%s\n", kv.Name, kv.Value)
}
tw.Flush()
fmt.Fprintln(dockerCli.Out())
printTable(dockerCli.Out(), out.BuildArgs, "Build Arg")
printTable(dockerCli.Out(), out.Labels, "Label")
if len(out.Materials) > 0 {
fmt.Fprintln(dockerCli.Out(), "Materials:")
tw = tabwriter.NewWriter(dockerCli.Out(), 1, 8, 1, '\t', 0)
fmt.Fprintf(tw, "URI\tDIGEST\n")
for _, m := range out.Materials {
fmt.Fprintf(tw, "%s\t%s\n", m.URI, strings.Join(m.Digests, ", "))
} }
tw.Flush()
fmt.Fprintln(dockerCli.Out()) fmt.Fprintln(dockerCli.Out())
}
if len(out.Attachments) > 0 { var ve *errdefs.VertexError
fmt.Fprintf(tw, "Attachments:\n") if errors.As(retErr, &ve) {
tw = tabwriter.NewWriter(dockerCli.Out(), 1, 8, 1, '\t', 0) dgst, err := digest.Parse(ve.Vertex.Digest)
fmt.Fprintf(tw, "DIGEST\tPLATFORM\tTYPE\n") if err != nil {
for _, a := range out.Attachments { return errors.Wrapf(err, "failed to parse vertex digest %s", ve.Vertex.Digest)
fmt.Fprintf(tw, "%s\t%s\t%s\n", a.Digest, a.Platform, a.Type) }
name, logs, err := loadVertexLogs(ctx, c, rec.Ref, dgst, 16)
if err != nil {
return errors.Wrapf(err, "failed to load vertex logs %s", dgst)
}
if len(logs) > 0 {
fmt.Fprintln(dockerCli.Out(), "Logs:")
fmt.Fprintf(dockerCli.Out(), "> => %s:\n", name)
for _, l := range logs {
fmt.Fprintln(dockerCli.Out(), "> "+l)
}
fmt.Fprintln(dockerCli.Out())
}
} }
tw.Flush()
fmt.Fprintln(dockerCli.Out())
}
if out.Error != nil { if debug.IsEnabled() {
if out.Error.Sources != nil { fmt.Fprintf(dockerCli.Out(), "\n%+v\n", stack.Formatter(retErr))
fmt.Fprint(dockerCli.Out(), string(out.Error.Sources)) } else if len(stack.Traces(retErr)) > 0 {
} fmt.Fprintf(dockerCli.Out(), "Enable --debug to see stack traces for error\n")
if len(out.Error.Logs) > 0 {
fmt.Fprintln(dockerCli.Out(), "Logs:")
fmt.Fprintf(dockerCli.Out(), "> => %s:\n", out.Error.Name)
for _, l := range out.Error.Logs {
fmt.Fprintln(dockerCli.Out(), "> "+l)
}
fmt.Fprintln(dockerCli.Out())
}
if len(out.Error.Stack) > 0 {
if debug.IsEnabled() {
fmt.Fprintf(dockerCli.Out(), "\n%s\n", out.Error.Stack)
} else {
fmt.Fprintf(dockerCli.Out(), "Enable --debug to see stack traces for error\n")
}
} }
} }
@@ -664,8 +388,7 @@ func inspectCmd(dockerCli command.Cli, rootOpts RootOptions) *cobra.Command {
attachmentCmd(dockerCli, rootOpts), attachmentCmd(dockerCli, rootOpts),
) )
flags := cmd.Flags() // flags := cmd.Flags()
flags.StringVar(&options.format, "format", formatter.PrettyFormatKey, "Format the output")
return cmd return cmd
} }
@@ -842,48 +565,36 @@ func descrType(desc ocispecs.Descriptor) string {
return desc.MediaType return desc.MediaType
} }
func tryParseValue[T any](s string, errs *[]string, f func(string) (T, error)) (T, bool) { func tryParseValue(s string, f func(string) (string, error)) string {
v, err := f(s) v, err := f(s)
if err != nil { if err != nil {
errStr := fmt.Sprintf("failed to parse %s: (%v)", s, err) return fmt.Sprintf("%s (%v)", s, err)
*errs = append(*errs, errStr)
} }
return v, true return v
} }
func printTable(w io.Writer, kvs []keyValueOutput, title string) { func printTable(w io.Writer, attrs map[string]string, prefix, title string) {
if len(kvs) == 0 { var keys []string
for k := range attrs {
if strings.HasPrefix(k, prefix) {
keys = append(keys, strings.TrimPrefix(k, prefix))
}
}
slices.Sort(keys)
if len(keys) == 0 {
return return
} }
tw := tabwriter.NewWriter(w, 1, 8, 1, '\t', 0) tw := tabwriter.NewWriter(w, 1, 8, 1, '\t', 0)
fmt.Fprintf(tw, "%s\tVALUE\n", strings.ToUpper(title)) fmt.Fprintf(tw, "%s\tVALUE\n", strings.ToUpper(title))
for _, k := range kvs { for _, k := range keys {
fmt.Fprintf(tw, "%s\t%s\n", k.Name, k.Value) fmt.Fprintf(tw, "%s\t%s\n", k, attrs[prefix+k])
} }
tw.Flush() tw.Flush()
fmt.Fprintln(w) fmt.Fprintln(w)
} }
func readKeyValues(attrs map[string]string, prefix string) []keyValueOutput {
var out []keyValueOutput
for k, v := range attrs {
if strings.HasPrefix(k, prefix) {
out = append(out, keyValueOutput{
Name: strings.TrimPrefix(k, prefix),
Value: v,
})
}
}
if len(out) == 0 {
return nil
}
slices.SortFunc(out, func(a, b keyValueOutput) int {
return cmp.Compare(a.Name, b.Name)
})
return out
}
func digestSetToDigests(ds slsa.DigestSet) []string { func digestSetToDigests(ds slsa.DigestSet) []string {
var out []string var out []string
for k, v := range ds { for k, v := range ds {

View File

@@ -3,6 +3,7 @@ package history
import ( import (
"context" "context"
"io" "io"
"slices"
"github.com/containerd/containerd/v2/core/content/proxy" "github.com/containerd/containerd/v2/core/content/proxy"
"github.com/containerd/platforms" "github.com/containerd/platforms"
@@ -41,7 +42,7 @@ func runAttachment(ctx context.Context, dockerCli command.Cli, opts attachmentOp
} }
} }
recs, err := queryRecords(ctx, opts.ref, nodes, nil) recs, err := queryRecords(ctx, opts.ref, nodes)
if err != nil { if err != nil {
return err return err
} }
@@ -53,6 +54,12 @@ func runAttachment(ctx context.Context, dockerCli command.Cli, opts attachmentOp
return errors.Errorf("no record found for ref %q", opts.ref) return errors.Errorf("no record found for ref %q", opts.ref)
} }
if opts.ref == "" {
slices.SortFunc(recs, func(a, b historyRecord) int {
return b.CreatedAt.AsTime().Compare(a.CreatedAt.AsTime())
})
}
rec := &recs[0] rec := &recs[0]
c, err := rec.node.Driver.Client(ctx) c, err := rec.node.Driver.Client(ctx)

View File

@@ -4,6 +4,7 @@ import (
"context" "context"
"io" "io"
"os" "os"
"slices"
"github.com/docker/buildx/builder" "github.com/docker/buildx/builder"
"github.com/docker/buildx/util/cobrautil/completion" "github.com/docker/buildx/util/cobrautil/completion"
@@ -38,7 +39,7 @@ func runLogs(ctx context.Context, dockerCli command.Cli, opts logsOptions) error
} }
} }
recs, err := queryRecords(ctx, opts.ref, nodes, nil) recs, err := queryRecords(ctx, opts.ref, nodes)
if err != nil { if err != nil {
return err return err
} }
@@ -50,6 +51,12 @@ func runLogs(ctx context.Context, dockerCli command.Cli, opts logsOptions) error
return errors.Errorf("no record found for ref %q", opts.ref) return errors.Errorf("no record found for ref %q", opts.ref)
} }
if opts.ref == "" {
slices.SortFunc(recs, func(a, b historyRecord) int {
return b.CreatedAt.AsTime().Compare(a.CreatedAt.AsTime())
})
}
rec := &recs[0] rec := &recs[0]
c, err := rec.node.Driver.Client(ctx) c, err := rec.node.Driver.Client(ctx)
if err != nil { if err != nil {

View File

@@ -5,8 +5,6 @@ import (
"encoding/json" "encoding/json"
"fmt" "fmt"
"os" "os"
"path"
"slices"
"time" "time"
"github.com/containerd/console" "github.com/containerd/console"
@@ -15,13 +13,12 @@ import (
"github.com/docker/buildx/util/cobrautil/completion" "github.com/docker/buildx/util/cobrautil/completion"
"github.com/docker/buildx/util/confutil" "github.com/docker/buildx/util/confutil"
"github.com/docker/buildx/util/desktop" "github.com/docker/buildx/util/desktop"
"github.com/docker/buildx/util/gitutil"
"github.com/docker/cli/cli" "github.com/docker/cli/cli"
"github.com/docker/cli/cli/command" "github.com/docker/cli/cli/command"
"github.com/docker/cli/cli/command/formatter" "github.com/docker/cli/cli/command/formatter"
"github.com/docker/go-units" "github.com/docker/go-units"
"github.com/pkg/errors"
"github.com/spf13/cobra" "github.com/spf13/cobra"
"golang.org/x/exp/slices"
) )
const ( const (
@@ -41,9 +38,6 @@ type lsOptions struct {
builder string builder string
format string format string
noTrunc bool noTrunc bool
filters []string
local bool
} }
func runLs(ctx context.Context, dockerCli command.Cli, opts lsOptions) error { func runLs(ctx context.Context, dockerCli command.Cli, opts lsOptions) error {
@@ -62,29 +56,7 @@ func runLs(ctx context.Context, dockerCli command.Cli, opts lsOptions) error {
} }
} }
queryOptions := &queryOptions{} out, err := queryRecords(ctx, "", nodes)
if opts.local {
wd, err := os.Getwd()
if err != nil {
return err
}
gitc, err := gitutil.New(gitutil.WithContext(ctx), gitutil.WithWorkingDir(wd))
if err != nil {
if st, err1 := os.Stat(path.Join(wd, ".git")); err1 == nil && st.IsDir() {
return errors.Wrap(err, "git was not found in the system")
}
return errors.Wrapf(err, "could not find git repository for local filter")
}
remote, err := gitc.RemoteURL()
if err != nil {
return errors.Wrapf(err, "could not get remote URL for local filter")
}
queryOptions.Filters = append(queryOptions.Filters, fmt.Sprintf("repository=%s", remote))
}
queryOptions.Filters = append(queryOptions.Filters, opts.filters...)
out, err := queryRecords(ctx, "", nodes, queryOptions)
if err != nil { if err != nil {
return err return err
} }
@@ -120,8 +92,6 @@ func lsCmd(dockerCli command.Cli, rootOpts RootOptions) *cobra.Command {
flags := cmd.Flags() flags := cmd.Flags()
flags.StringVar(&options.format, "format", formatter.TableFormatKey, "Format the output") flags.StringVar(&options.format, "format", formatter.TableFormatKey, "Format the output")
flags.BoolVar(&options.noTrunc, "no-trunc", false, "Don't truncate output") flags.BoolVar(&options.noTrunc, "no-trunc", false, "Don't truncate output")
flags.StringArrayVar(&options.filters, "filter", nil, `Provide filter values (e.g., "status=error")`)
flags.BoolVar(&options.local, "local", false, "List records for current repository only")
return cmd return cmd
} }
@@ -191,7 +161,7 @@ type lsContext struct {
} }
func (c *lsContext) MarshalJSON() ([]byte, error) { func (c *lsContext) MarshalJSON() ([]byte, error) {
m := map[string]any{ m := map[string]interface{}{
"ref": c.FullRef(), "ref": c.FullRef(),
"name": c.Name(), "name": c.Name(),
"status": c.Status(), "status": c.Status(),

View File

@@ -3,6 +3,7 @@ package history
import ( import (
"context" "context"
"fmt" "fmt"
"slices"
"github.com/docker/buildx/builder" "github.com/docker/buildx/builder"
"github.com/docker/buildx/util/cobrautil/completion" "github.com/docker/buildx/util/cobrautil/completion"
@@ -34,7 +35,7 @@ func runOpen(ctx context.Context, dockerCli command.Cli, opts openOptions) error
} }
} }
recs, err := queryRecords(ctx, opts.ref, nodes, nil) recs, err := queryRecords(ctx, opts.ref, nodes)
if err != nil { if err != nil {
return err return err
} }
@@ -46,6 +47,12 @@ func runOpen(ctx context.Context, dockerCli command.Cli, opts openOptions) error
return errors.Errorf("no record found for ref %q", opts.ref) return errors.Errorf("no record found for ref %q", opts.ref)
} }
if opts.ref == "" {
slices.SortFunc(recs, func(a, b historyRecord) int {
return b.CreatedAt.AsTime().Compare(a.CreatedAt.AsTime())
})
}
rec := &recs[0] rec := &recs[0]
url := desktop.BuildURL(fmt.Sprintf("%s/%s/%s", rec.node.Builder, rec.node.Name, rec.Ref)) url := desktop.BuildURL(fmt.Sprintf("%s/%s/%s", rec.node.Builder, rec.node.Name, rec.Ref))

View File

@@ -24,9 +24,6 @@ func RootCmd(rootcmd *cobra.Command, dockerCli command.Cli, opts RootOptions) *c
logsCmd(dockerCli, opts), logsCmd(dockerCli, opts),
inspectCmd(dockerCli, opts), inspectCmd(dockerCli, opts),
openCmd(dockerCli, opts), openCmd(dockerCli, opts),
traceCmd(dockerCli, opts),
importCmd(dockerCli, opts),
exportCmd(dockerCli, opts),
) )
return cmd return cmd

View File

@@ -1,228 +0,0 @@
package history
import (
"bytes"
"context"
"encoding/json"
"fmt"
"io"
"net"
"os"
"time"
"github.com/containerd/console"
"github.com/containerd/containerd/v2/core/content/proxy"
"github.com/docker/buildx/builder"
"github.com/docker/buildx/util/cobrautil/completion"
"github.com/docker/buildx/util/otelutil"
"github.com/docker/buildx/util/otelutil/jaeger"
"github.com/docker/cli/cli/command"
controlapi "github.com/moby/buildkit/api/services/control"
"github.com/opencontainers/go-digest"
ocispecs "github.com/opencontainers/image-spec/specs-go/v1"
"github.com/pkg/browser"
"github.com/pkg/errors"
"github.com/spf13/cobra"
jaegerui "github.com/tonistiigi/jaeger-ui-rest"
)
type traceOptions struct {
builder string
ref string
addr string
compare string
}
func loadTrace(ctx context.Context, ref string, nodes []builder.Node) (string, []byte, error) {
recs, err := queryRecords(ctx, ref, nodes, &queryOptions{
CompletedOnly: true,
})
if err != nil {
return "", nil, err
}
if len(recs) == 0 {
if ref == "" {
return "", nil, errors.New("no records found")
}
return "", nil, errors.Errorf("no record found for ref %q", ref)
}
rec := &recs[0]
if rec.CompletedAt == nil {
return "", nil, errors.Errorf("build %q is not completed, only completed builds can be traced", rec.Ref)
}
if rec.Trace == nil {
// build is complete but no trace yet. try to finalize the trace
time.Sleep(1 * time.Second) // give some extra time for last parts of trace to be written
c, err := rec.node.Driver.Client(ctx)
if err != nil {
return "", nil, err
}
_, err = c.ControlClient().UpdateBuildHistory(ctx, &controlapi.UpdateBuildHistoryRequest{
Ref: rec.Ref,
Finalize: true,
})
if err != nil {
return "", nil, err
}
recs, err := queryRecords(ctx, rec.Ref, []builder.Node{*rec.node}, &queryOptions{
CompletedOnly: true,
})
if err != nil {
return "", nil, err
}
if len(recs) == 0 {
return "", nil, errors.Errorf("build record %q was deleted", rec.Ref)
}
rec = &recs[0]
if rec.Trace == nil {
return "", nil, errors.Errorf("build record %q is missing a trace", rec.Ref)
}
}
c, err := rec.node.Driver.Client(ctx)
if err != nil {
return "", nil, err
}
store := proxy.NewContentStore(c.ContentClient())
ra, err := store.ReaderAt(ctx, ocispecs.Descriptor{
Digest: digest.Digest(rec.Trace.Digest),
MediaType: rec.Trace.MediaType,
Size: rec.Trace.Size,
})
if err != nil {
return "", nil, err
}
spans, err := otelutil.ParseSpanStubs(io.NewSectionReader(ra, 0, ra.Size()))
if err != nil {
return "", nil, err
}
wrapper := struct {
Data []jaeger.Trace `json:"data"`
}{
Data: spans.JaegerData().Data,
}
if len(wrapper.Data) == 0 {
return "", nil, errors.New("no trace data")
}
buf := &bytes.Buffer{}
enc := json.NewEncoder(buf)
enc.SetIndent("", " ")
if err := enc.Encode(wrapper); err != nil {
return "", nil, err
}
return string(wrapper.Data[0].TraceID), buf.Bytes(), nil
}
func runTrace(ctx context.Context, dockerCli command.Cli, opts traceOptions) error {
b, err := builder.New(dockerCli, builder.WithName(opts.builder))
if err != nil {
return err
}
nodes, err := b.LoadNodes(ctx)
if err != nil {
return err
}
for _, node := range nodes {
if node.Err != nil {
return node.Err
}
}
traceID, data, err := loadTrace(ctx, opts.ref, nodes)
if err != nil {
return err
}
srv := jaegerui.NewServer(jaegerui.Config{})
if err := srv.AddTrace(traceID, bytes.NewReader(data)); err != nil {
return err
}
url := "/trace/" + traceID
if opts.compare != "" {
traceIDcomp, data, err := loadTrace(ctx, opts.compare, nodes)
if err != nil {
return errors.Wrapf(err, "failed to load trace for %s", opts.compare)
}
if err := srv.AddTrace(traceIDcomp, bytes.NewReader(data)); err != nil {
return err
}
url = "/trace/" + traceIDcomp + "..." + traceID
}
var term bool
if _, err := console.ConsoleFromFile(os.Stdout); err == nil {
term = true
}
if !term && opts.compare == "" {
fmt.Fprintln(dockerCli.Out(), string(data))
return nil
}
ln, err := net.Listen("tcp", opts.addr)
if err != nil {
return err
}
go func() {
time.Sleep(100 * time.Millisecond)
browser.OpenURL(url)
}()
url = "http://" + ln.Addr().String() + url
fmt.Fprintf(dockerCli.Err(), "Trace available at %s\n", url)
go func() {
<-ctx.Done()
ln.Close()
}()
err = srv.Serve(ln)
if err != nil {
select {
case <-ctx.Done():
return nil
default:
}
}
return err
}
func traceCmd(dockerCli command.Cli, rootOpts RootOptions) *cobra.Command {
var options traceOptions
cmd := &cobra.Command{
Use: "trace [OPTIONS] [REF]",
Short: "Show the OpenTelemetry trace of a build record",
Args: cobra.MaximumNArgs(1),
RunE: func(cmd *cobra.Command, args []string) error {
if len(args) > 0 {
options.ref = args[0]
}
options.builder = *rootOpts.Builder
return runTrace(cmd.Context(), dockerCli, options)
},
ValidArgsFunction: completion.Disable,
}
flags := cmd.Flags()
flags.StringVar(&options.addr, "addr", "127.0.0.1:0", "Address to bind the UI server")
flags.StringVar(&options.compare, "compare", "", "Compare with another build reference")
return cmd
}

View File

@@ -1,14 +1,10 @@
package history package history
import ( import (
"bytes"
"context" "context"
"encoding/csv"
"fmt" "fmt"
"io" "io"
"path/filepath" "path/filepath"
"slices"
"strconv"
"strings" "strings"
"sync" "sync"
"time" "time"
@@ -17,13 +13,10 @@ import (
"github.com/docker/buildx/builder" "github.com/docker/buildx/builder"
"github.com/docker/buildx/localstate" "github.com/docker/buildx/localstate"
controlapi "github.com/moby/buildkit/api/services/control" controlapi "github.com/moby/buildkit/api/services/control"
"github.com/moby/buildkit/util/gitutil"
"github.com/pkg/errors" "github.com/pkg/errors"
"golang.org/x/sync/errgroup" "golang.org/x/sync/errgroup"
) )
const recordsLimit = 50
func buildName(fattrs map[string]string, ls *localstate.State) string { func buildName(fattrs map[string]string, ls *localstate.State) string {
var res string var res string
@@ -113,30 +106,10 @@ type historyRecord struct {
name string name string
} }
type queryOptions struct { func queryRecords(ctx context.Context, ref string, nodes []builder.Node) ([]historyRecord, error) {
CompletedOnly bool
Filters []string
}
func queryRecords(ctx context.Context, ref string, nodes []builder.Node, opts *queryOptions) ([]historyRecord, error) {
var mu sync.Mutex var mu sync.Mutex
var out []historyRecord var out []historyRecord
var offset *int
if strings.HasPrefix(ref, "^") {
off, err := strconv.Atoi(ref[1:])
if err != nil {
return nil, errors.Wrapf(err, "invalid offset %q", ref)
}
offset = &off
ref = ""
}
var filters []string
if opts != nil {
filters = opts.Filters
}
eg, ctx := errgroup.WithContext(ctx) eg, ctx := errgroup.WithContext(ctx)
for _, node := range nodes { for _, node := range nodes {
node := node node := node
@@ -149,25 +122,9 @@ func queryRecords(ctx context.Context, ref string, nodes []builder.Node, opts *q
if err != nil { if err != nil {
return err return err
} }
var matchers []matchFunc
if len(filters) > 0 {
filters, matchers, err = dockerFiltersToBuildkit(filters)
if err != nil {
return err
}
sb := bytes.NewBuffer(nil)
w := csv.NewWriter(sb)
w.Write(filters)
w.Flush()
filters = []string{strings.TrimSuffix(sb.String(), "\n")}
}
serv, err := c.ControlClient().ListenBuildHistory(ctx, &controlapi.BuildHistoryRequest{ serv, err := c.ControlClient().ListenBuildHistory(ctx, &controlapi.BuildHistoryRequest{
EarlyExit: true, EarlyExit: true,
Ref: ref, Ref: ref,
Limit: recordsLimit,
Filter: filters,
}) })
if err != nil { if err != nil {
return err return err
@@ -185,7 +142,6 @@ func queryRecords(ctx context.Context, ref string, nodes []builder.Node, opts *q
ts = &t ts = &t
} }
defer serv.CloseSend() defer serv.CloseSend()
loop0:
for { for {
he, err := serv.Recv() he, err := serv.Recv()
if err != nil { if err != nil {
@@ -197,17 +153,6 @@ func queryRecords(ctx context.Context, ref string, nodes []builder.Node, opts *q
if he.Type == controlapi.BuildHistoryEventType_DELETED || he.Record == nil { if he.Type == controlapi.BuildHistoryEventType_DELETED || he.Record == nil {
continue continue
} }
if opts != nil && opts.CompletedOnly && he.Type != controlapi.BuildHistoryEventType_COMPLETE {
continue
}
// for older buildkit that don't support filters apply local filters
for _, matcher := range matchers {
if !matcher(he.Record) {
continue loop0
}
}
records = append(records, historyRecord{ records = append(records, historyRecord{
BuildHistoryRecord: he.Record, BuildHistoryRecord: he.Record,
currentTimestamp: ts, currentTimestamp: ts,
@@ -224,27 +169,6 @@ func queryRecords(ctx context.Context, ref string, nodes []builder.Node, opts *q
if err := eg.Wait(); err != nil { if err := eg.Wait(); err != nil {
return nil, err return nil, err
} }
slices.SortFunc(out, func(a, b historyRecord) int {
return b.CreatedAt.AsTime().Compare(a.CreatedAt.AsTime())
})
if offset != nil {
var filtered []historyRecord
for _, r := range out {
if *offset > 0 {
*offset--
continue
}
filtered = append(filtered, r)
break
}
if *offset > 0 {
return nil, errors.Errorf("no completed build found with offset %d", *offset)
}
out = filtered
}
return out, nil return out, nil
} }
@@ -254,150 +178,3 @@ func formatDuration(d time.Duration) string {
} }
return fmt.Sprintf("%dm %2ds", int(d.Minutes()), int(d.Seconds())%60) return fmt.Sprintf("%dm %2ds", int(d.Minutes()), int(d.Seconds())%60)
} }
type matchFunc func(*controlapi.BuildHistoryRecord) bool
func dockerFiltersToBuildkit(in []string) ([]string, []matchFunc, error) {
out := []string{}
matchers := []matchFunc{}
for _, f := range in {
key, value, sep, found := cutAny(f, "!=", "=", "<=", "<", ">=", ">")
if !found {
return nil, nil, errors.Errorf("invalid filter %q", f)
}
switch key {
case "ref", "repository", "status":
if sep != "=" && sep != "!=" {
return nil, nil, errors.Errorf("invalid separator for %q, expected = or !=", f)
}
matchers = append(matchers, valueFiler(key, value, sep))
if sep == "=" {
if key == "status" {
sep = "=="
} else {
sep = "~="
}
}
case "startedAt", "completedAt", "duration":
if sep == "=" || sep == "!=" {
return nil, nil, errors.Errorf("invalid separator for %q, expected <=, <, >= or >", f)
}
matcher, err := timeBasedFilter(key, value, sep)
if err != nil {
return nil, nil, err
}
matchers = append(matchers, matcher)
default:
return nil, nil, errors.Errorf("unsupported filter %q", f)
}
out = append(out, key+sep+value)
}
return out, matchers, nil
}
func valueFiler(key, value, sep string) matchFunc {
return func(rec *controlapi.BuildHistoryRecord) bool {
var recValue string
switch key {
case "ref":
recValue = rec.Ref
case "repository":
v, ok := rec.FrontendAttrs["vcs:source"]
if ok {
recValue = v
} else {
if context, ok := rec.FrontendAttrs["context"]; ok {
if ref, err := gitutil.ParseGitRef(context); err == nil {
recValue = ref.Remote
}
}
}
case "status":
if rec.CompletedAt != nil {
if rec.Error != nil {
if strings.Contains(rec.Error.Message, "context canceled") {
recValue = "canceled"
} else {
recValue = "error"
}
} else {
recValue = "completed"
}
} else {
recValue = "running"
}
}
switch sep {
case "=":
if key == "status" {
return recValue == value
}
return strings.Contains(recValue, value)
case "!=":
return recValue != value
default:
return false
}
}
}
func timeBasedFilter(key, value, sep string) (matchFunc, error) {
var cmp int64
switch key {
case "startedAt", "completedAt":
v, err := time.ParseDuration(value)
if err == nil {
tm := time.Now().Add(-v)
cmp = tm.Unix()
} else {
tm, err := time.Parse(time.RFC3339, value)
if err != nil {
return nil, errors.Errorf("invalid time %s", value)
}
cmp = tm.Unix()
}
case "duration":
v, err := time.ParseDuration(value)
if err != nil {
return nil, errors.Errorf("invalid duration %s", value)
}
cmp = int64(v)
default:
return nil, nil
}
return func(rec *controlapi.BuildHistoryRecord) bool {
var val int64
switch key {
case "startedAt":
val = rec.CreatedAt.AsTime().Unix()
case "completedAt":
if rec.CompletedAt != nil {
val = rec.CompletedAt.AsTime().Unix()
}
case "duration":
if rec.CompletedAt != nil {
val = int64(rec.CompletedAt.AsTime().Sub(rec.CreatedAt.AsTime()))
}
}
switch sep {
case ">=":
return val >= cmp
case "<=":
return val <= cmp
case ">":
return val > cmp
default:
return val < cmp
}
}, nil
}
func cutAny(s string, seps ...string) (before, after, sep string, found bool) {
for _, sep := range seps {
if idx := strings.Index(s, sep); idx != -1 {
return s[:idx], s[idx+len(sep):], sep, true
}
}
return s, "", "", false
}

View File

@@ -194,7 +194,7 @@ func runCreate(ctx context.Context, dockerCli command.Cli, in createOptions, arg
} }
s := s s := s
eg2.Go(func() error { eg2.Go(func() error {
sub.Log(1, fmt.Appendf(nil, "copying %s from %s to %s\n", s.Desc.Digest.String(), s.Ref.String(), t.String())) sub.Log(1, []byte(fmt.Sprintf("copying %s from %s to %s\n", s.Desc.Digest.String(), s.Ref.String(), t.String())))
return r.Copy(ctx, s, t) return r.Copy(ctx, s, t)
}) })
} }
@@ -202,7 +202,7 @@ func runCreate(ctx context.Context, dockerCli command.Cli, in createOptions, arg
if err := eg2.Wait(); err != nil { if err := eg2.Wait(); err != nil {
return err return err
} }
sub.Log(1, fmt.Appendf(nil, "pushing %s to %s\n", desc.Digest.String(), t.String())) sub.Log(1, []byte(fmt.Sprintf("pushing %s to %s\n", desc.Digest.String(), t.String())))
return r.Push(ctx, t, desc, dt) return r.Push(ctx, t, desc, dt)
}) })
}) })

View File

@@ -115,25 +115,6 @@ func runInspect(ctx context.Context, dockerCli command.Cli, in inspectOptions) e
fmt.Fprintf(w, "\t%s:\t%s\n", k, v) fmt.Fprintf(w, "\t%s:\t%s\n", k, v)
} }
} }
if len(nodes[i].CDIDevices) > 0 {
fmt.Fprintf(w, "Devices:\n")
for _, dev := range nodes[i].CDIDevices {
fmt.Fprintf(w, "\tName:\t%s\n", dev.Name)
if dev.OnDemand {
fmt.Fprintf(w, "\tOn-Demand:\t%v\n", dev.OnDemand)
} else {
fmt.Fprintf(w, "\tAutomatically allowed:\t%v\n", dev.AutoAllow)
}
if len(dev.Annotations) > 0 {
fmt.Fprintf(w, "\tAnnotations:\n")
for k, v := range dev.Annotations {
fmt.Fprintf(w, "\t\t%s:\t%s\n", k, v)
}
}
}
}
for ri, rule := range nodes[i].GCPolicy { for ri, rule := range nodes[i].GCPolicy {
fmt.Fprintf(w, "GC Policy rule#%d:\n", ri) fmt.Fprintf(w, "GC Policy rule#%d:\n", ri)
fmt.Fprintf(w, "\tAll:\t%v\n", rule.All) fmt.Fprintf(w, "\tAll:\t%v\n", rule.All)

View File

@@ -4,7 +4,6 @@ import (
"context" "context"
"encoding/json" "encoding/json"
"fmt" "fmt"
"maps"
"sort" "sort"
"strings" "strings"
"time" "time"
@@ -160,9 +159,6 @@ func lsPrint(dockerCli command.Cli, current *store.NodeGroup, builders []*builde
} }
continue continue
} }
if ctx.Format.IsJSON() {
continue
}
for _, n := range b.Nodes() { for _, n := range b.Nodes() {
if n.Err != nil { if n.Err != nil {
if ctx.Format.IsTable() { if ctx.Format.IsTable() {
@@ -410,7 +406,9 @@ func truncPlatforms(pfs []string, max int) truncatedPlatforms {
left[ppf] = append(left[ppf], pf) left[ppf] = append(left[ppf], pf)
} }
} }
maps.Copy(res, left) for k, v := range left {
res[k] = v
}
return truncatedPlatforms{ return truncatedPlatforms{
res: res, res: res,
input: pfs, input: pfs,

View File

@@ -16,14 +16,13 @@ import (
"github.com/docker/cli/cli-plugins/plugin" "github.com/docker/cli/cli-plugins/plugin"
"github.com/docker/cli/cli/command" "github.com/docker/cli/cli/command"
"github.com/docker/cli/cli/debug" "github.com/docker/cli/cli/debug"
cliflags "github.com/docker/cli/cli/flags"
"github.com/moby/buildkit/util/appcontext" "github.com/moby/buildkit/util/appcontext"
"github.com/sirupsen/logrus" "github.com/sirupsen/logrus"
"github.com/spf13/cobra" "github.com/spf13/cobra"
"github.com/spf13/pflag" "github.com/spf13/pflag"
) )
func NewRootCmd(name string, isPlugin bool, dockerCli *command.DockerCli) *cobra.Command { func NewRootCmd(name string, isPlugin bool, dockerCli command.Cli) *cobra.Command {
var opt rootOptions var opt rootOptions
cmd := &cobra.Command{ cmd := &cobra.Command{
Short: "Docker Buildx", Short: "Docker Buildx",
@@ -41,17 +40,7 @@ func NewRootCmd(name string, isPlugin bool, dockerCli *command.DockerCli) *cobra
} }
cmd.SetContext(appcontext.Context()) cmd.SetContext(appcontext.Context())
if !isPlugin { if !isPlugin {
// InstallFlags and SetDefaultOptions are necessary to match return nil
// the plugin mode behavior to handle env vars such as
// DOCKER_TLS, DOCKER_TLS_VERIFY, ... and we also need to use a
// new flagset to avoid conflict with the global debug flag
// that we already handle in the root command otherwise it
// would panic.
nflags := pflag.NewFlagSet(cmd.DisplayName(), pflag.ContinueOnError)
options := cliflags.NewClientOptions()
options.InstallFlags(nflags)
options.SetDefaultOptions(nflags)
return dockerCli.Initialize(options)
} }
return plugin.PersistentPreRunE(cmd, args) return plugin.PersistentPreRunE(cmd, args)
}, },

View File

@@ -75,9 +75,7 @@ func RunBuild(ctx context.Context, dockerCli command.Cli, in *controllerapi.Buil
opts.Platforms = platforms opts.Platforms = platforms
dockerConfig := dockerCli.ConfigFile() dockerConfig := dockerCli.ConfigFile()
opts.Session = append(opts.Session, authprovider.NewDockerAuthProvider(authprovider.DockerAuthProviderConfig{ opts.Session = append(opts.Session, authprovider.NewDockerAuthProvider(dockerConfig, nil))
ConfigFile: dockerConfig,
}))
secrets, err := controllerapi.CreateSecrets(in.Secrets) secrets, err := controllerapi.CreateSecrets(in.Secrets)
if err != nil { if err != nil {

View File

@@ -13,8 +13,8 @@ import (
type BuildxController interface { type BuildxController interface {
Build(ctx context.Context, options *controllerapi.BuildOptions, in io.ReadCloser, progress progress.Writer) (ref string, resp *client.SolveResponse, inputs *build.Inputs, err error) Build(ctx context.Context, options *controllerapi.BuildOptions, in io.ReadCloser, progress progress.Writer) (ref string, resp *client.SolveResponse, inputs *build.Inputs, err error)
// Invoke starts an IO session into the specified process. // Invoke starts an IO session into the specified process.
// If pid doesn't match to any running processes, it starts a new process with the specified config. // If pid doesn't matche to any running processes, it starts a new process with the specified config.
// If there is no container running or InvokeConfig.Rollback is specified, the process will start in a newly created container. // If there is no container running or InvokeConfig.Rollback is speicfied, the process will start in a newly created container.
// NOTE: If needed, in the future, we can split this API into three APIs (NewContainer, NewProcess and Attach). // NOTE: If needed, in the future, we can split this API into three APIs (NewContainer, NewProcess and Attach).
Invoke(ctx context.Context, ref, pid string, options *controllerapi.InvokeConfig, ioIn io.ReadCloser, ioOut io.WriteCloser, ioErr io.WriteCloser) error Invoke(ctx context.Context, ref, pid string, options *controllerapi.InvokeConfig, ioIn io.ReadCloser, ioOut io.WriteCloser, ioErr io.WriteCloser) error
Kill(ctx context.Context) error Kill(ctx context.Context) error

View File

@@ -1,10 +1,6 @@
package pb package pb
import ( import "github.com/moby/buildkit/client"
"maps"
"github.com/moby/buildkit/client"
)
func CreateCaches(entries []*CacheOptionsEntry) []client.CacheOptionsEntry { func CreateCaches(entries []*CacheOptionsEntry) []client.CacheOptionsEntry {
var outs []client.CacheOptionsEntry var outs []client.CacheOptionsEntry
@@ -16,7 +12,9 @@ func CreateCaches(entries []*CacheOptionsEntry) []client.CacheOptionsEntry {
Type: entry.Type, Type: entry.Type,
Attrs: map[string]string{}, Attrs: map[string]string{},
} }
maps.Copy(out.Attrs, entry.Attrs) for k, v := range entry.Attrs {
out.Attrs[k] = v
}
outs = append(outs, out) outs = append(outs, out)
} }
return outs return outs

View File

@@ -2,7 +2,6 @@ package pb
import ( import (
"io" "io"
"maps"
"os" "os"
"strconv" "strconv"
@@ -27,7 +26,9 @@ func CreateExports(entries []*ExportEntry) ([]client.ExportEntry, []string, erro
Type: entry.Type, Type: entry.Type,
Attrs: map[string]string{}, Attrs: map[string]string{},
} }
maps.Copy(out.Attrs, entry.Attrs) for k, v := range entry.Attrs {
out.Attrs[k] = v
}
supportFile := false supportFile := false
supportDir := false supportDir := false

View File

@@ -24,11 +24,11 @@ func (w *writer) Write(status *client.SolveStatus) {
func (w *writer) WriteBuildRef(target string, ref string) {} func (w *writer) WriteBuildRef(target string, ref string) {}
func (w *writer) ValidateLogSource(digest.Digest, any) bool { func (w *writer) ValidateLogSource(digest.Digest, interface{}) bool {
return true return true
} }
func (w *writer) ClearLogSource(any) {} func (w *writer) ClearLogSource(interface{}) {}
func ToControlStatus(s *client.SolveStatus) *StatusResponse { func ToControlStatus(s *client.SolveStatus) *StatusResponse {
resp := StatusResponse{} resp := StatusResponse{}

View File

@@ -1,8 +1,6 @@
package pb package pb
import ( import (
"slices"
"github.com/moby/buildkit/session" "github.com/moby/buildkit/session"
"github.com/moby/buildkit/session/sshforward/sshprovider" "github.com/moby/buildkit/session/sshforward/sshprovider"
) )
@@ -12,7 +10,7 @@ func CreateSSH(ssh []*SSH) (session.Attachable, error) {
for _, ssh := range ssh { for _, ssh := range ssh {
cfg := sshprovider.AgentConfig{ cfg := sshprovider.AgentConfig{
ID: ssh.ID, ID: ssh.ID,
Paths: slices.Clone(ssh.Paths), Paths: append([]string{}, ssh.Paths...),
} }
configs = append(configs, cfg) configs = append(configs, cfg)
} }

View File

@@ -39,7 +39,7 @@ func (p *Process) Done() <-chan error {
return p.errCh return p.errCh
} }
// Manager manages a set of processes. // Manager manages a set of proceses.
type Manager struct { type Manager struct {
container atomic.Value container atomic.Value
processes sync.Map processes sync.Map

View File

@@ -140,7 +140,7 @@ func serveCmd(dockerCli command.Cli) *cobra.Command {
return err return err
} }
pidF := filepath.Join(root, defaultPIDFilename) pidF := filepath.Join(root, defaultPIDFilename)
if err := os.WriteFile(pidF, fmt.Appendf(nil, "%d", os.Getpid()), 0600); err != nil { if err := os.WriteFile(pidF, []byte(fmt.Sprintf("%d", os.Getpid())), 0600); err != nil {
return err return err
} }
defer func() { defer func() {

View File

@@ -48,8 +48,6 @@ target "lint" {
"linux/s390x", "linux/s390x",
"linux/ppc64le", "linux/ppc64le",
"linux/riscv64", "linux/riscv64",
"netbsd/amd64",
"netbsd/arm64",
"openbsd/amd64", "openbsd/amd64",
"openbsd/arm64", "openbsd/arm64",
"windows/amd64", "windows/amd64",
@@ -169,8 +167,6 @@ target "binaries-cross" {
"linux/ppc64le", "linux/ppc64le",
"linux/riscv64", "linux/riscv64",
"linux/s390x", "linux/s390x",
"netbsd/amd64",
"netbsd/arm64",
"openbsd/amd64", "openbsd/amd64",
"openbsd/arm64", "openbsd/arm64",
"windows/amd64", "windows/amd64",

View File

@@ -285,11 +285,19 @@ The key takes a list of annotations, in the format of `KEY=VALUE`.
```hcl ```hcl
target "default" { target "default" {
output = [{ type = "image", name = "foo" }] output = ["type=image,name=foo"]
annotations = ["org.opencontainers.image.authors=dvdksn"] annotations = ["org.opencontainers.image.authors=dvdksn"]
} }
``` ```
is the same as
```hcl
target "default" {
output = ["type=image,name=foo,annotation.org.opencontainers.image.authors=dvdksn"]
}
```
By default, the annotation is added to image manifests. You can configure the By default, the annotation is added to image manifests. You can configure the
level of the annotations by adding a prefix to the annotation, containing a level of the annotations by adding a prefix to the annotation, containing a
comma-separated list of all the levels that you want to annotate. The following comma-separated list of all the levels that you want to annotate. The following
@@ -297,7 +305,7 @@ example adds annotations to both the image index and manifests.
```hcl ```hcl
target "default" { target "default" {
output = [{ type = "image", name = "foo" }] output = ["type=image,name=foo"]
annotations = ["index,manifest:org.opencontainers.image.authors=dvdksn"] annotations = ["index,manifest:org.opencontainers.image.authors=dvdksn"]
} }
``` ```
@@ -313,13 +321,8 @@ This attribute accepts the long-form CSV version of attestation parameters.
```hcl ```hcl
target "default" { target "default" {
attest = [ attest = [
{ "type=provenance,mode=min",
type = "provenance", "type=sbom"
mode = "max",
},
{
type = "sbom",
}
] ]
} }
``` ```
@@ -335,15 +338,8 @@ This takes a list value, so you can specify multiple cache sources.
```hcl ```hcl
target "app" { target "app" {
cache-from = [ cache-from = [
{ "type=s3,region=eu-west-1,bucket=mybucket",
type = "s3", "user/repo:cache",
region = "eu-west-1",
bucket = "mybucket"
},
{
type = "registry",
ref = "user/repo:cache"
}
] ]
} }
``` ```
@@ -359,14 +355,8 @@ This takes a list value, so you can specify multiple cache export targets.
```hcl ```hcl
target "app" { target "app" {
cache-to = [ cache-to = [
{ "type=s3,region=eu-west-1,bucket=mybucket",
type = "s3", "type=inline"
region = "eu-west-1",
bucket = "mybucket"
},
{
type = "inline",
}
] ]
} }
``` ```
@@ -873,7 +863,7 @@ The following example configures the target to use a cache-only output,
```hcl ```hcl
target "default" { target "default" {
output = [{ type = "cacheonly" }] output = ["type=cacheonly"]
} }
``` ```
@@ -913,8 +903,8 @@ variable "HOME" {
target "default" { target "default" {
secret = [ secret = [
{ type = "env", id = "KUBECONFIG" }, "type=env,id=KUBECONFIG",
{ type = "file", id = "aws", src = "${HOME}/.aws/credentials" }, "type=file,id=aws,src=${HOME}/.aws/credentials"
] ]
} }
``` ```
@@ -958,7 +948,7 @@ This can be useful if you need to access private repositories during a build.
```hcl ```hcl
target "default" { target "default" {
ssh = [{ id = "default" }] ssh = ["default"]
} }
``` ```

View File

@@ -347,22 +347,18 @@ is defined in https://golang.org/pkg/path/#Match.
```console ```console
$ docker buildx bake --set target.args.mybuildarg=value $ docker buildx bake --set target.args.mybuildarg=value
$ docker buildx bake --set target.platform=linux/arm64 $ docker buildx bake --set target.platform=linux/arm64
$ docker buildx bake --set foo*.args.mybuildarg=value # overrides build arg for all targets starting with 'foo' $ docker buildx bake --set foo*.args.mybuildarg=value # overrides build arg for all targets starting with 'foo'
$ docker buildx bake --set *.platform=linux/arm64 # overrides platform for all targets $ docker buildx bake --set *.platform=linux/arm64 # overrides platform for all targets
$ docker buildx bake --set foo*.no-cache # bypass caching only for targets starting with 'foo' $ docker buildx bake --set foo*.no-cache # bypass caching only for targets starting with 'foo'
$ docker buildx bake --set target.platform+=linux/arm64 # appends 'linux/arm64' to the platform list
``` ```
You can override the following fields: You can override the following fields:
* `annotations`
* `attest`
* `args` * `args`
* `cache-from` * `cache-from`
* `cache-to` * `cache-to`
* `context` * `context`
* `dockerfile` * `dockerfile`
* `entitlements`
* `labels` * `labels`
* `load` * `load`
* `no-cache` * `no-cache`
@@ -375,20 +371,3 @@ You can override the following fields:
* `ssh` * `ssh`
* `tags` * `tags`
* `target` * `target`
You can append using `+=` operator for the following fields:
* `annotations
* `attest
* `cache-from`
* `cache-to`
* `entitlements
* `no-cache-filter`
* `output`
* `platform`
* `secrets`
* `ssh`
* `tags`
> [!NOTE]
> ¹ These fields already append by default.

View File

@@ -16,7 +16,7 @@ Start a build
| Name | Type | Default | Description | | Name | Type | Default | Description |
|:----------------------------------------|:--------------|:----------|:-------------------------------------------------------------------------------------------------------------| |:----------------------------------------|:--------------|:----------|:-------------------------------------------------------------------------------------------------------------|
| [`--add-host`](#add-host) | `stringSlice` | | Add a custom host-to-IP mapping (format: `host:ip`) | | [`--add-host`](#add-host) | `stringSlice` | | Add a custom host-to-IP mapping (format: `host:ip`) |
| [`--allow`](#allow) | `stringArray` | | Allow extra privileged entitlement (e.g., `network.host`, `security.insecure`) | | [`--allow`](#allow) | `stringSlice` | | Allow extra privileged entitlement (e.g., `network.host`, `security.insecure`) |
| [`--annotation`](#annotation) | `stringArray` | | Add annotation to the image | | [`--annotation`](#annotation) | `stringArray` | | Add annotation to the image |
| [`--attest`](#attest) | `stringArray` | | Attestation parameters (format: `type=sbom,generator=image`) | | [`--attest`](#attest) | `stringArray` | | Attestation parameters (format: `type=sbom,generator=image`) |
| [`--build-arg`](#build-arg) | `stringArray` | | Set build-time variables | | [`--build-arg`](#build-arg) | `stringArray` | | Set build-time variables |
@@ -944,7 +944,7 @@ $ docker buildx build --secret [type=file,]id=<ID>[,src=<FILEPATH>] .
###### `type=file` usage ###### `type=file` usage
In the following example, `type=file` is automatically detected because no In the following example, `type=file` is automatically detected because no
environment variable matching `aws` (the ID) is set. environment variable mathing `aws` (the ID) is set.
```console ```console
$ docker buildx build --secret id=aws,src=$HOME/.aws/credentials . $ docker buildx build --secret id=aws,src=$HOME/.aws/credentials .

View File

@@ -12,7 +12,7 @@ Start a build
| Name | Type | Default | Description | | Name | Type | Default | Description |
|:--------------------|:--------------|:----------|:-------------------------------------------------------------------------------------------------------------| |:--------------------|:--------------|:----------|:-------------------------------------------------------------------------------------------------------------|
| `--add-host` | `stringSlice` | | Add a custom host-to-IP mapping (format: `host:ip`) | | `--add-host` | `stringSlice` | | Add a custom host-to-IP mapping (format: `host:ip`) |
| `--allow` | `stringArray` | | Allow extra privileged entitlement (e.g., `network.host`, `security.insecure`) | | `--allow` | `stringSlice` | | Allow extra privileged entitlement (e.g., `network.host`, `security.insecure`) |
| `--annotation` | `stringArray` | | Add annotation to the image | | `--annotation` | `stringArray` | | Add annotation to the image |
| `--attest` | `stringArray` | | Attestation parameters (format: `type=sbom,generator=image`) | | `--attest` | `stringArray` | | Attestation parameters (format: `type=sbom,generator=image`) |
| `--build-arg` | `stringArray` | | Set build-time variables | | `--build-arg` | `stringArray` | | Set build-time variables |

View File

@@ -5,16 +5,13 @@ Commands to work on build records
### Subcommands ### Subcommands
| Name | Description | | Name | Description |
|:---------------------------------------|:-----------------------------------------------| |:---------------------------------------|:-------------------------------|
| [`export`](buildx_history_export.md) | Export a build into Docker Desktop bundle | | [`inspect`](buildx_history_inspect.md) | Inspect a build |
| [`import`](buildx_history_import.md) | Import a build into Docker Desktop | | [`logs`](buildx_history_logs.md) | Print the logs of a build |
| [`inspect`](buildx_history_inspect.md) | Inspect a build | | [`ls`](buildx_history_ls.md) | List build records |
| [`logs`](buildx_history_logs.md) | Print the logs of a build | | [`open`](buildx_history_open.md) | Open a build in Docker Desktop |
| [`ls`](buildx_history_ls.md) | List build records | | [`rm`](buildx_history_rm.md) | Remove build records |
| [`open`](buildx_history_open.md) | Open a build in Docker Desktop |
| [`rm`](buildx_history_rm.md) | Remove build records |
| [`trace`](buildx_history_trace.md) | Show the OpenTelemetry trace of a build record |
### Options ### Options

View File

@@ -1,17 +0,0 @@
# docker buildx history export
<!---MARKER_GEN_START-->
Export a build into Docker Desktop bundle
### Options
| Name | Type | Default | Description |
|:-----------------|:---------|:--------|:-----------------------------------------|
| `--all` | `bool` | | Export all records for the builder |
| `--builder` | `string` | | Override the configured builder instance |
| `-D`, `--debug` | `bool` | | Enable debug logging |
| `-o`, `--output` | `string` | | Output file path |
<!---MARKER_GEN_END-->

View File

@@ -1,16 +0,0 @@
# docker buildx history import
<!---MARKER_GEN_START-->
Import a build into Docker Desktop
### Options
| Name | Type | Default | Description |
|:----------------|:--------------|:--------|:-----------------------------------------|
| `--builder` | `string` | | Override the configured builder instance |
| `-D`, `--debug` | `bool` | | Enable debug logging |
| `-f`, `--file` | `stringArray` | | Import from a file path |
<!---MARKER_GEN_END-->

View File

@@ -12,106 +12,11 @@ Inspect a build
### Options ### Options
| Name | Type | Default | Description | | Name | Type | Default | Description |
|:----------------------|:---------|:---------|:-----------------------------------------| |:----------------|:---------|:--------|:-----------------------------------------|
| `--builder` | `string` | | Override the configured builder instance | | `--builder` | `string` | | Override the configured builder instance |
| `-D`, `--debug` | `bool` | | Enable debug logging | | `-D`, `--debug` | `bool` | | Enable debug logging |
| [`--format`](#format) | `string` | `pretty` | Format the output |
<!---MARKER_GEN_END--> <!---MARKER_GEN_END-->
## Examples
### <a name="format"></a> Format the output (--format)
The formatting options (`--format`) pretty-prints the output to `pretty` (default),
`json` or using a Go template.
```console
$ docker buildx history inspect
Name: buildx (binaries)
Context: .
Dockerfile: Dockerfile
VCS Repository: https://github.com/crazy-max/buildx.git
VCS Revision: f15eaa1ee324ffbbab29605600d27a84cab86361
Target: binaries
Platforms: linux/amd64
Keep Git Dir: true
Started: 2025-02-07 11:56:24
Duration: 1m 1s
Build Steps: 16/16 (25% cached)
Image Resolve Mode: local
Materials:
URI DIGEST
pkg:docker/docker/dockerfile@1 sha256:93bfd3b68c109427185cd78b4779fc82b484b0b7618e36d0f104d4d801e66d25
pkg:docker/golang@1.23-alpine3.21?platform=linux%2Famd64 sha256:2c49857f2295e89b23b28386e57e018a86620a8fede5003900f2d138ba9c4037
pkg:docker/tonistiigi/xx@1.6.1?platform=linux%2Famd64 sha256:923441d7c25f1e2eb5789f82d987693c47b8ed987c4ab3b075d6ed2b5d6779a3
Attachments:
DIGEST PLATFORM TYPE
sha256:217329d2af959d4f02e3a96dcbe62bf100cab1feb8006a047ddfe51a5397f7e3 https://slsa.dev/provenance/v0.2
Print build logs: docker buildx history logs g9808bwrjrlkbhdamxklx660b
```
```console
$ docker buildx history inspect --format json
{
"Name": "buildx (binaries)",
"Ref": "5w7vkqfi0rf59hw4hnmn627r9",
"Context": ".",
"Dockerfile": "Dockerfile",
"VCSRepository": "https://github.com/crazy-max/buildx.git",
"VCSRevision": "f15eaa1ee324ffbbab29605600d27a84cab86361",
"Target": "binaries",
"Platform": [
"linux/amd64"
],
"KeepGitDir": true,
"StartedAt": "2025-02-07T12:01:05.75807272+01:00",
"CompletedAt": "2025-02-07T12:02:07.991778875+01:00",
"Duration": 62233706155,
"Status": "completed",
"NumCompletedSteps": 16,
"NumTotalSteps": 16,
"NumCachedSteps": 4,
"Config": {
"ImageResolveMode": "local"
},
"Materials": [
{
"URI": "pkg:docker/docker/dockerfile@1",
"Digests": [
"sha256:93bfd3b68c109427185cd78b4779fc82b484b0b7618e36d0f104d4d801e66d25"
]
},
{
"URI": "pkg:docker/golang@1.23-alpine3.21?platform=linux%2Famd64",
"Digests": [
"sha256:2c49857f2295e89b23b28386e57e018a86620a8fede5003900f2d138ba9c4037"
]
},
{
"URI": "pkg:docker/tonistiigi/xx@1.6.1?platform=linux%2Famd64",
"Digests": [
"sha256:923441d7c25f1e2eb5789f82d987693c47b8ed987c4ab3b075d6ed2b5d6779a3"
]
}
],
"Attachments": [
{
"Digest": "sha256:450fdd2e6b868fecd69e9891c2c404ba461aa38a47663b4805edeb8d2baf80b1",
"Type": "https://slsa.dev/provenance/v0.2"
}
]
}
```
```console
$ docker buildx history inspect --format "{{.Name}}: {{.VCSRepository}} ({{.VCSRevision}})"
buildx (binaries): https://github.com/crazy-max/buildx.git (f15eaa1ee324ffbbab29605600d27a84cab86361)
```

View File

@@ -5,14 +5,12 @@ List build records
### Options ### Options
| Name | Type | Default | Description | | Name | Type | Default | Description |
|:----------------|:--------------|:--------|:---------------------------------------------| |:----------------|:---------|:--------|:-----------------------------------------|
| `--builder` | `string` | | Override the configured builder instance | | `--builder` | `string` | | Override the configured builder instance |
| `-D`, `--debug` | `bool` | | Enable debug logging | | `-D`, `--debug` | `bool` | | Enable debug logging |
| `--filter` | `stringArray` | | Provide filter values (e.g., `status=error`) | | `--format` | `string` | `table` | Format the output |
| `--format` | `string` | `table` | Format the output | | `--no-trunc` | `bool` | | Don't truncate output |
| `--local` | `bool` | | List records for current repository only |
| `--no-trunc` | `bool` | | Don't truncate output |
<!---MARKER_GEN_END--> <!---MARKER_GEN_END-->

View File

@@ -1,17 +0,0 @@
# docker buildx history trace
<!---MARKER_GEN_START-->
Show the OpenTelemetry trace of a build record
### Options
| Name | Type | Default | Description |
|:----------------|:---------|:--------------|:-----------------------------------------|
| `--addr` | `string` | `127.0.0.1:0` | Address to bind the UI server |
| `--builder` | `string` | | Override the configured builder instance |
| `--compare` | `string` | | Compare with another build reference |
| `-D`, `--debug` | `bool` | | Enable debug logging |
<!---MARKER_GEN_END-->

View File

@@ -23,10 +23,10 @@ import (
"github.com/docker/docker/api/types/mount" "github.com/docker/docker/api/types/mount"
"github.com/docker/docker/api/types/network" "github.com/docker/docker/api/types/network"
"github.com/docker/docker/api/types/system" "github.com/docker/docker/api/types/system"
dockerclient "github.com/docker/docker/client"
"github.com/docker/docker/errdefs" "github.com/docker/docker/errdefs"
dockerarchive "github.com/docker/docker/pkg/archive" dockerarchive "github.com/docker/docker/pkg/archive"
"github.com/docker/docker/pkg/idtools" "github.com/docker/docker/pkg/idtools"
"github.com/docker/docker/pkg/jsonmessage"
"github.com/docker/docker/pkg/stdcopy" "github.com/docker/docker/pkg/stdcopy"
"github.com/moby/buildkit/client" "github.com/moby/buildkit/client"
"github.com/pkg/errors" "github.com/pkg/errors"
@@ -56,7 +56,6 @@ type Driver struct {
restartPolicy container.RestartPolicy restartPolicy container.RestartPolicy
env []string env []string
defaultLoad bool defaultLoad bool
gpus []container.DeviceRequest
} }
func (d *Driver) IsMobyDriver() bool { func (d *Driver) IsMobyDriver() bool {
@@ -71,7 +70,7 @@ func (d *Driver) Bootstrap(ctx context.Context, l progress.Logger) error {
return progress.Wrap("[internal] booting buildkit", l, func(sub progress.SubLogger) error { return progress.Wrap("[internal] booting buildkit", l, func(sub progress.SubLogger) error {
_, err := d.DockerAPI.ContainerInspect(ctx, d.Name) _, err := d.DockerAPI.ContainerInspect(ctx, d.Name)
if err != nil { if err != nil {
if errdefs.IsNotFound(err) { if dockerclient.IsErrNotFound(err) {
return d.create(ctx, sub) return d.create(ctx, sub)
} }
return err return err
@@ -96,20 +95,19 @@ func (d *Driver) create(ctx context.Context, l progress.SubLogger) error {
if err != nil { if err != nil {
return err return err
} }
resp, err := d.DockerAPI.ImageCreate(ctx, imageName, image.CreateOptions{ rc, err := d.DockerAPI.ImageCreate(ctx, imageName, image.CreateOptions{
RegistryAuth: ra, RegistryAuth: ra,
}) })
if err != nil { if err != nil {
return err return err
} }
defer resp.Close() _, err = io.Copy(io.Discard, rc)
return jsonmessage.DisplayJSONMessagesStream(resp, io.Discard, 0, false, nil) return err
}); err != nil { }); err != nil {
// image pulling failed, check if it exists in local image store. // image pulling failed, check if it exists in local image store.
// if not, return pulling error. otherwise log it. // if not, return pulling error. otherwise log it.
_, errInspect := d.DockerAPI.ImageInspect(ctx, imageName) _, _, errInspect := d.DockerAPI.ImageInspectWithRaw(ctx, imageName)
found := errInspect == nil if errInspect != nil {
if !found {
return err return err
} }
l.Wrap("pulling failed, using local image "+imageName, func() error { return nil }) l.Wrap("pulling failed, using local image "+imageName, func() error { return nil })
@@ -159,9 +157,6 @@ func (d *Driver) create(ctx context.Context, l progress.SubLogger) error {
if d.cpusetMems != "" { if d.cpusetMems != "" {
hc.Resources.CpusetMems = d.cpusetMems hc.Resources.CpusetMems = d.cpusetMems
} }
if len(d.gpus) > 0 && d.hasGPUCapability(ctx, cfg.Image, d.gpus) {
hc.Resources.DeviceRequests = d.gpus
}
if info, err := d.DockerAPI.Info(ctx); err == nil { if info, err := d.DockerAPI.Info(ctx); err == nil {
if info.CgroupDriver == "cgroupfs" { if info.CgroupDriver == "cgroupfs" {
// Place all buildkit containers inside this cgroup by default so limits can be attached // Place all buildkit containers inside this cgroup by default so limits can be attached
@@ -311,7 +306,7 @@ func (d *Driver) start(ctx context.Context) error {
func (d *Driver) Info(ctx context.Context) (*driver.Info, error) { func (d *Driver) Info(ctx context.Context) (*driver.Info, error) {
ctn, err := d.DockerAPI.ContainerInspect(ctx, d.Name) ctn, err := d.DockerAPI.ContainerInspect(ctx, d.Name)
if err != nil { if err != nil {
if errdefs.IsNotFound(err) { if dockerclient.IsErrNotFound(err) {
return &driver.Info{ return &driver.Info{
Status: driver.Inactive, Status: driver.Inactive,
}, nil }, nil
@@ -424,7 +419,6 @@ func (d *Driver) Features(ctx context.Context) map[driver.Feature]bool {
driver.DockerExporter: true, driver.DockerExporter: true,
driver.CacheExport: true, driver.CacheExport: true,
driver.MultiPlatform: true, driver.MultiPlatform: true,
driver.DirectPush: true,
driver.DefaultLoad: d.defaultLoad, driver.DefaultLoad: d.defaultLoad,
} }
} }
@@ -433,31 +427,6 @@ func (d *Driver) HostGatewayIP(ctx context.Context) (net.IP, error) {
return nil, errors.New("host-gateway is not supported by the docker-container driver") return nil, errors.New("host-gateway is not supported by the docker-container driver")
} }
// hasGPUCapability checks if docker daemon has GPU capability. We need to run
// a dummy container with GPU device to check if the daemon has this capability
// because there is no API to check it yet.
func (d *Driver) hasGPUCapability(ctx context.Context, image string, gpus []container.DeviceRequest) bool {
cfg := &container.Config{
Image: image,
Entrypoint: []string{"/bin/true"},
}
hc := &container.HostConfig{
NetworkMode: container.NetworkMode(container.IPCModeNone),
AutoRemove: true,
Resources: container.Resources{
DeviceRequests: gpus,
},
}
resp, err := d.DockerAPI.ContainerCreate(ctx, cfg, hc, &network.NetworkingConfig{}, nil, "")
if err != nil {
return false
}
if err := d.DockerAPI.ContainerStart(ctx, resp.ID, container.StartOptions{}); err != nil {
return false
}
return true
}
func demuxConn(c net.Conn) net.Conn { func demuxConn(c net.Conn) net.Conn {
pr, pw := io.Pipe() pr, pw := io.Pipe()
// TODO: rewrite parser with Reader() to avoid goroutine switch // TODO: rewrite parser with Reader() to avoid goroutine switch

View File

@@ -51,12 +51,6 @@ func (f *factory) New(ctx context.Context, cfg driver.InitConfig) (driver.Driver
InitConfig: cfg, InitConfig: cfg,
restartPolicy: rp, restartPolicy: rp,
} }
var gpus dockeropts.GpuOpts
if err := gpus.Set("all"); err == nil {
if v := gpus.Value(); len(v) > 0 {
d.gpus = v
}
}
for k, v := range cfg.DriverOpts { for k, v := range cfg.DriverOpts {
switch { switch {
case k == "network": case k == "network":

View File

@@ -93,7 +93,6 @@ func (d *Driver) Features(ctx context.Context) map[driver.Feature]bool {
driver.DockerExporter: useContainerdSnapshotter, driver.DockerExporter: useContainerdSnapshotter,
driver.CacheExport: useContainerdSnapshotter, driver.CacheExport: useContainerdSnapshotter,
driver.MultiPlatform: useContainerdSnapshotter, driver.MultiPlatform: useContainerdSnapshotter,
driver.DirectPush: useContainerdSnapshotter,
driver.DefaultLoad: true, driver.DefaultLoad: true,
} }
}) })

View File

@@ -7,6 +7,5 @@ const DockerExporter Feature = "Docker exporter"
const CacheExport Feature = "Cache export" const CacheExport Feature = "Cache export"
const MultiPlatform Feature = "Multi-platform build" const MultiPlatform Feature = "Multi-platform build"
const DirectPush Feature = "Direct push"
const DefaultLoad Feature = "Automatically load images to the Docker Engine image store" const DefaultLoad Feature = "Automatically load images to the Docker Engine image store"

View File

@@ -35,10 +35,10 @@ func testEndpoint(server, defaultNamespace string, ca, cert, key []byte, skipTLS
} }
var testStoreCfg = store.NewConfig( var testStoreCfg = store.NewConfig(
func() any { func() interface{} {
return &map[string]any{} return &map[string]interface{}{}
}, },
store.EndpointTypeGetter(KubernetesEndpoint, func() any { return &EndpointMeta{} }), store.EndpointTypeGetter(KubernetesEndpoint, func() interface{} { return &EndpointMeta{} }),
) )
func TestSaveLoadContexts(t *testing.T) { func TestSaveLoadContexts(t *testing.T) {
@@ -197,7 +197,7 @@ func checkClientConfig(t *testing.T, ep Endpoint, server, namespace string, ca,
func save(s store.Writer, ep Endpoint, name string) error { func save(s store.Writer, ep Endpoint, name string) error {
meta := store.Metadata{ meta := store.Metadata{
Endpoints: map[string]any{ Endpoints: map[string]interface{}{
KubernetesEndpoint: ep.EndpointMeta, KubernetesEndpoint: ep.EndpointMeta,
}, },
Name: name, Name: name,

View File

@@ -43,7 +43,7 @@ type Endpoint struct {
func init() { func init() {
command.RegisterDefaultStoreEndpoints( command.RegisterDefaultStoreEndpoints(
store.EndpointTypeGetter(KubernetesEndpoint, func() any { return &EndpointMeta{} }), store.EndpointTypeGetter(KubernetesEndpoint, func() interface{} { return &EndpointMeta{} }),
) )
} }
@@ -96,7 +96,7 @@ func (c *Endpoint) KubernetesConfig() clientcmd.ClientConfig {
// ResolveDefault returns endpoint metadata for the default Kubernetes // ResolveDefault returns endpoint metadata for the default Kubernetes
// endpoint, which is derived from the env-based kubeconfig. // endpoint, which is derived from the env-based kubeconfig.
func (c *EndpointMeta) ResolveDefault() (any, *store.EndpointTLSData, error) { func (c *EndpointMeta) ResolveDefault() (interface{}, *store.EndpointTLSData, error) {
kubeconfig := os.Getenv("KUBECONFIG") kubeconfig := os.Getenv("KUBECONFIG")
if kubeconfig == "" { if kubeconfig == "" {
kubeconfig = filepath.Join(homedir.Get(), ".kube/config") kubeconfig = filepath.Join(homedir.Get(), ".kube/config")

View File

@@ -238,7 +238,6 @@ func (d *Driver) Features(_ context.Context) map[driver.Feature]bool {
driver.DockerExporter: d.DockerAPI != nil, driver.DockerExporter: d.DockerAPI != nil,
driver.CacheExport: true, driver.CacheExport: true,
driver.MultiPlatform: true, // Untested (needs multiple Driver instances) driver.MultiPlatform: true, // Untested (needs multiple Driver instances)
driver.DirectPush: true,
driver.DefaultLoad: d.defaultLoad, driver.DefaultLoad: d.defaultLoad,
} }
} }

View File

@@ -90,7 +90,7 @@ func ListRunningPods(ctx context.Context, client clientcorev1.PodInterface, depl
for i := range podList.Items { for i := range podList.Items {
pod := &podList.Items[i] pod := &podList.Items[i]
if pod.Status.Phase == corev1.PodRunning { if pod.Status.Phase == corev1.PodRunning {
logrus.Debugf("pod running: %q", pod.Name) logrus.Debugf("pod runnning: %q", pod.Name)
runningPods = append(runningPods, pod) runningPods = append(runningPods, pod)
} }
} }

View File

@@ -25,7 +25,7 @@ func GenerateNodeName(builderName string, txn *store.Txn) (string, error) {
} }
var name string var name string
for range 6 { for i := 0; i < 6; i++ {
name, err = randomName() name, err = randomName()
if err != nil { if err != nil {
return "", err return "", err

View File

@@ -28,7 +28,7 @@ type Driver struct {
*tlsOpts *tlsOpts
defaultLoad bool defaultLoad bool
// remote driver caches the client because its Bootstrap/Info methods reuse it internally // remote driver caches the client because its Bootstap/Info methods reuse it internally
clientOnce sync.Once clientOnce sync.Once
client *client.Client client *client.Client
err error err error
@@ -164,7 +164,6 @@ func (d *Driver) Features(ctx context.Context) map[driver.Feature]bool {
driver.DockerExporter: true, driver.DockerExporter: true,
driver.CacheExport: true, driver.CacheExport: true,
driver.MultiPlatform: true, driver.MultiPlatform: true,
driver.DirectPush: true,
driver.DefaultLoad: d.defaultLoad, driver.DefaultLoad: d.defaultLoad,
} }
} }

66
go.mod
View File

@@ -1,14 +1,14 @@
module github.com/docker/buildx module github.com/docker/buildx
go 1.23.0 go 1.22.0
require ( require (
github.com/Masterminds/semver/v3 v3.2.1 github.com/Masterminds/semver/v3 v3.2.1
github.com/Microsoft/go-winio v0.6.2 github.com/Microsoft/go-winio v0.6.2
github.com/aws/aws-sdk-go-v2/config v1.27.27 github.com/aws/aws-sdk-go-v2/config v1.27.27
github.com/compose-spec/compose-go/v2 v2.4.9 github.com/compose-spec/compose-go/v2 v2.4.7
github.com/containerd/console v1.0.4 github.com/containerd/console v1.0.4
github.com/containerd/containerd/v2 v2.0.4 github.com/containerd/containerd/v2 v2.0.2
github.com/containerd/continuity v0.4.5 github.com/containerd/continuity v0.4.5
github.com/containerd/errdefs v1.0.0 github.com/containerd/errdefs v1.0.0
github.com/containerd/log v0.1.0 github.com/containerd/log v0.1.0
@@ -17,9 +17,9 @@ require (
github.com/creack/pty v1.1.24 github.com/creack/pty v1.1.24
github.com/davecgh/go-spew v1.1.1 github.com/davecgh/go-spew v1.1.1
github.com/distribution/reference v0.6.0 github.com/distribution/reference v0.6.0
github.com/docker/cli v28.0.4+incompatible github.com/docker/cli v27.5.0+incompatible
github.com/docker/cli-docs-tool v0.9.0 github.com/docker/cli-docs-tool v0.9.0
github.com/docker/docker v28.0.4+incompatible github.com/docker/docker v27.5.0+incompatible
github.com/docker/go-units v0.5.0 github.com/docker/go-units v0.5.0
github.com/gofrs/flock v0.12.1 github.com/gofrs/flock v0.12.1
github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510 github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510
@@ -29,37 +29,36 @@ require (
github.com/hashicorp/hcl/v2 v2.23.0 github.com/hashicorp/hcl/v2 v2.23.0
github.com/in-toto/in-toto-golang v0.5.0 github.com/in-toto/in-toto-golang v0.5.0
github.com/mitchellh/hashstructure/v2 v2.0.2 github.com/mitchellh/hashstructure/v2 v2.0.2
github.com/moby/buildkit v0.21.0-rc1 github.com/moby/buildkit v0.19.0
github.com/moby/sys/mountinfo v0.7.2 github.com/moby/sys/mountinfo v0.7.2
github.com/moby/sys/signal v0.7.1 github.com/moby/sys/signal v0.7.1
github.com/morikuni/aec v1.0.0 github.com/morikuni/aec v1.0.0
github.com/opencontainers/go-digest v1.0.0 github.com/opencontainers/go-digest v1.0.0
github.com/opencontainers/image-spec v1.1.1 github.com/opencontainers/image-spec v1.1.0
github.com/pelletier/go-toml v1.9.5 github.com/pelletier/go-toml v1.9.5
github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c
github.com/pkg/errors v0.9.1 github.com/pkg/errors v0.9.1
github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10 github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10
github.com/serialx/hashring v0.0.0-20200727003509-22c0c7ab6b1b github.com/serialx/hashring v0.0.0-20200727003509-22c0c7ab6b1b
github.com/sirupsen/logrus v1.9.3 github.com/sirupsen/logrus v1.9.3
github.com/spf13/cobra v1.9.1 github.com/spf13/cobra v1.8.1
github.com/spf13/pflag v1.0.6 github.com/spf13/pflag v1.0.5
github.com/stretchr/testify v1.10.0 github.com/stretchr/testify v1.10.0
github.com/tonistiigi/fsutil v0.0.0-20250318190121-d73a4b3b8a7e github.com/tonistiigi/fsutil v0.0.0-20250113203817-b14e27f4135a
github.com/tonistiigi/go-csvvalue v0.0.0-20240710180619-ddb21b71c0b4 github.com/tonistiigi/go-csvvalue v0.0.0-20240710180619-ddb21b71c0b4
github.com/tonistiigi/jaeger-ui-rest v0.0.0-20250408171107-3dd17559e117
github.com/zclconf/go-cty v1.16.0 github.com/zclconf/go-cty v1.16.0
go.opentelemetry.io/otel v1.31.0 go.opentelemetry.io/otel v1.31.0
go.opentelemetry.io/otel/exporters/stdout/stdouttrace v1.31.0
go.opentelemetry.io/otel/metric v1.31.0 go.opentelemetry.io/otel/metric v1.31.0
go.opentelemetry.io/otel/sdk v1.31.0 go.opentelemetry.io/otel/sdk v1.31.0
go.opentelemetry.io/otel/trace v1.31.0 go.opentelemetry.io/otel/trace v1.31.0
golang.org/x/mod v0.24.0 golang.org/x/exp v0.0.0-20240909161429-701f63a606c0
golang.org/x/sync v0.13.0 golang.org/x/mod v0.21.0
golang.org/x/sys v0.32.0 golang.org/x/sync v0.10.0
golang.org/x/term v0.31.0 golang.org/x/sys v0.28.0
golang.org/x/text v0.24.0 golang.org/x/term v0.27.0
golang.org/x/text v0.21.0
google.golang.org/genproto/googleapis/rpc v0.0.0-20241021214115-324edc3d5d38 google.golang.org/genproto/googleapis/rpc v0.0.0-20241021214115-324edc3d5d38
google.golang.org/grpc v1.69.4 google.golang.org/grpc v1.68.1
google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.5.1 google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.5.1
google.golang.org/protobuf v1.35.2 google.golang.org/protobuf v1.35.2
gopkg.in/yaml.v3 v3.0.1 gopkg.in/yaml.v3 v3.0.1
@@ -70,7 +69,8 @@ require (
require ( require (
github.com/AdaLogics/go-fuzz-headers v0.0.0-20240806141605-e8a1dd7889d6 // indirect github.com/AdaLogics/go-fuzz-headers v0.0.0-20240806141605-e8a1dd7889d6 // indirect
github.com/Azure/go-ansiterm v0.0.0-20250102033503-faa5f7b0171c // indirect github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1 // indirect
github.com/Shopify/logrus-bugsnag v0.0.0-20171204204709-577dee27f20d // indirect
github.com/agext/levenshtein v1.2.3 // indirect github.com/agext/levenshtein v1.2.3 // indirect
github.com/apparentlymart/go-cidr v1.0.1 // indirect github.com/apparentlymart/go-cidr v1.0.1 // indirect
github.com/apparentlymart/go-textseg/v15 v15.0.0 // indirect github.com/apparentlymart/go-textseg/v15 v15.0.0 // indirect
@@ -94,10 +94,10 @@ require (
github.com/containerd/ttrpc v1.2.7 // indirect github.com/containerd/ttrpc v1.2.7 // indirect
github.com/cpuguy83/go-md2man/v2 v2.0.6 // indirect github.com/cpuguy83/go-md2man/v2 v2.0.6 // indirect
github.com/docker/distribution v2.8.3+incompatible // indirect github.com/docker/distribution v2.8.3+incompatible // indirect
github.com/docker/docker-credential-helpers v0.9.3 // indirect github.com/docker/docker-credential-helpers v0.8.2 // indirect
github.com/docker/go v1.5.1-1.0.20160303222718-d30aec9fd63c // indirect
github.com/docker/go-connections v0.5.0 // indirect github.com/docker/go-connections v0.5.0 // indirect
github.com/docker/go-metrics v0.0.1 // indirect github.com/docker/go-metrics v0.0.1 // indirect
github.com/docker/libtrust v0.0.0-20160708172513-aabc10ec26b7 // indirect
github.com/emicklei/go-restful/v3 v3.11.0 // indirect github.com/emicklei/go-restful/v3 v3.11.0 // indirect
github.com/felixge/httpsnoop v1.0.4 // indirect github.com/felixge/httpsnoop v1.0.4 // indirect
github.com/fvbommel/sortorder v1.0.1 // indirect github.com/fvbommel/sortorder v1.0.1 // indirect
@@ -111,7 +111,7 @@ require (
github.com/gogo/protobuf v1.3.2 // indirect github.com/gogo/protobuf v1.3.2 // indirect
github.com/golang/protobuf v1.5.4 // indirect github.com/golang/protobuf v1.5.4 // indirect
github.com/google/gnostic-models v0.6.8 // indirect github.com/google/gnostic-models v0.6.8 // indirect
github.com/google/go-cmp v0.7.0 // indirect github.com/google/go-cmp v0.6.0 // indirect
github.com/google/gofuzz v1.2.0 // indirect github.com/google/gofuzz v1.2.0 // indirect
github.com/gorilla/mux v1.8.1 // indirect github.com/gorilla/mux v1.8.1 // indirect
github.com/gorilla/websocket v1.5.0 // indirect github.com/gorilla/websocket v1.5.0 // indirect
@@ -122,19 +122,21 @@ require (
github.com/inconshreveable/mousetrap v1.1.0 // indirect github.com/inconshreveable/mousetrap v1.1.0 // indirect
github.com/josharian/intern v1.0.0 // indirect github.com/josharian/intern v1.0.0 // indirect
github.com/json-iterator/go v1.1.12 // indirect github.com/json-iterator/go v1.1.12 // indirect
github.com/klauspost/compress v1.18.0 // indirect github.com/klauspost/compress v1.17.11 // indirect
github.com/mailru/easyjson v0.7.7 // indirect github.com/mailru/easyjson v0.7.7 // indirect
github.com/mattn/go-runewidth v0.0.15 // indirect github.com/mattn/go-runewidth v0.0.15 // indirect
github.com/mattn/go-shellwords v1.0.12 // indirect github.com/mattn/go-shellwords v1.0.12 // indirect
github.com/miekg/pkcs11 v1.1.1 // indirect
github.com/mitchellh/go-wordwrap v0.0.0-20150314170334-ad45545899c7 // indirect github.com/mitchellh/go-wordwrap v0.0.0-20150314170334-ad45545899c7 // indirect
github.com/mitchellh/mapstructure v1.5.0 // indirect
github.com/moby/docker-image-spec v1.3.1 // indirect github.com/moby/docker-image-spec v1.3.1 // indirect
github.com/moby/locker v1.0.1 // indirect github.com/moby/locker v1.0.1 // indirect
github.com/moby/patternmatcher v0.6.0 // indirect github.com/moby/patternmatcher v0.6.0 // indirect
github.com/moby/spdystream v0.4.0 // indirect github.com/moby/spdystream v0.4.0 // indirect
github.com/moby/sys/sequential v0.6.0 // indirect github.com/moby/sys/sequential v0.6.0 // indirect
github.com/moby/sys/user v0.4.0 // indirect github.com/moby/sys/user v0.3.0 // indirect
github.com/moby/sys/userns v0.1.0 // indirect github.com/moby/sys/userns v0.1.0 // indirect
github.com/moby/term v0.5.2 // indirect github.com/moby/term v0.5.0 // indirect
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
github.com/modern-go/reflect2 v1.0.2 // indirect github.com/modern-go/reflect2 v1.0.2 // indirect
github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 // indirect github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 // indirect
@@ -149,14 +151,13 @@ require (
github.com/secure-systems-lab/go-securesystemslib v0.4.0 // indirect github.com/secure-systems-lab/go-securesystemslib v0.4.0 // indirect
github.com/shibumi/go-pathspec v1.3.0 // indirect github.com/shibumi/go-pathspec v1.3.0 // indirect
github.com/theupdateframework/notary v0.7.0 // indirect github.com/theupdateframework/notary v0.7.0 // indirect
github.com/tonistiigi/dchapes-mode v0.0.0-20250318174251-73d941a28323 // indirect github.com/tonistiigi/dchapes-mode v0.0.0-20241001053921-ca0759fec205 // indirect
github.com/tonistiigi/units v0.0.0-20180711220420-6950e57a87ea // indirect github.com/tonistiigi/units v0.0.0-20180711220420-6950e57a87ea // indirect
github.com/tonistiigi/vt100 v0.0.0-20240514184818-90bafcd6abab // indirect github.com/tonistiigi/vt100 v0.0.0-20240514184818-90bafcd6abab // indirect
github.com/x448/float16 v0.8.4 // indirect github.com/x448/float16 v0.8.4 // indirect
github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb // indirect github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb // indirect
github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 // indirect github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 // indirect
github.com/xeipuuv/gojsonschema v1.2.0 // indirect github.com/xeipuuv/gojsonschema v1.2.0 // indirect
github.com/xhit/go-str2duration/v2 v2.1.0 // indirect
go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.56.0 // indirect go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.56.0 // indirect
go.opentelemetry.io/contrib/instrumentation/net/http/httptrace/otelhttptrace v0.56.0 // indirect go.opentelemetry.io/contrib/instrumentation/net/http/httptrace/otelhttptrace v0.56.0 // indirect
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.56.0 // indirect go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.56.0 // indirect
@@ -167,13 +168,12 @@ require (
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.31.0 // indirect go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.31.0 // indirect
go.opentelemetry.io/otel/sdk/metric v1.31.0 // indirect go.opentelemetry.io/otel/sdk/metric v1.31.0 // indirect
go.opentelemetry.io/proto/otlp v1.3.1 // indirect go.opentelemetry.io/proto/otlp v1.3.1 // indirect
golang.org/x/crypto v0.37.0 // indirect golang.org/x/crypto v0.31.0 // indirect
golang.org/x/exp v0.0.0-20250408133849-7e4ce0ab07d0 // indirect golang.org/x/net v0.33.0 // indirect
golang.org/x/net v0.39.0 // indirect
golang.org/x/oauth2 v0.23.0 // indirect golang.org/x/oauth2 v0.23.0 // indirect
golang.org/x/time v0.11.0 // indirect golang.org/x/time v0.6.0 // indirect
golang.org/x/tools v0.32.0 // indirect golang.org/x/tools v0.25.0 // indirect
google.golang.org/genproto/googleapis/api v0.0.0-20241021214115-324edc3d5d38 // indirect google.golang.org/genproto/googleapis/api v0.0.0-20241007155032-5fefd90f89a9 // indirect
gopkg.in/inf.v0 v0.9.1 // indirect gopkg.in/inf.v0 v0.9.1 // indirect
gopkg.in/yaml.v2 v2.4.0 // indirect gopkg.in/yaml.v2 v2.4.0 // indirect
k8s.io/klog/v2 v2.130.1 // indirect k8s.io/klog/v2 v2.130.1 // indirect

154
go.sum
View File

@@ -2,8 +2,9 @@ github.com/AdaLogics/go-fuzz-headers v0.0.0-20240806141605-e8a1dd7889d6 h1:He8af
github.com/AdaLogics/go-fuzz-headers v0.0.0-20240806141605-e8a1dd7889d6/go.mod h1:8o94RPi1/7XTJvwPpRSzSUedZrtlirdB3r9Z20bi2f8= github.com/AdaLogics/go-fuzz-headers v0.0.0-20240806141605-e8a1dd7889d6/go.mod h1:8o94RPi1/7XTJvwPpRSzSUedZrtlirdB3r9Z20bi2f8=
github.com/AdamKorcz/go-118-fuzz-build v0.0.0-20231105174938-2b5cbb29f3e2 h1:dIScnXFlF784X79oi7MzVT6GWqr/W1uUt0pB5CsDs9M= github.com/AdamKorcz/go-118-fuzz-build v0.0.0-20231105174938-2b5cbb29f3e2 h1:dIScnXFlF784X79oi7MzVT6GWqr/W1uUt0pB5CsDs9M=
github.com/AdamKorcz/go-118-fuzz-build v0.0.0-20231105174938-2b5cbb29f3e2/go.mod h1:gCLVsLfv1egrcZu+GoJATN5ts75F2s62ih/457eWzOw= github.com/AdamKorcz/go-118-fuzz-build v0.0.0-20231105174938-2b5cbb29f3e2/go.mod h1:gCLVsLfv1egrcZu+GoJATN5ts75F2s62ih/457eWzOw=
github.com/Azure/go-ansiterm v0.0.0-20250102033503-faa5f7b0171c h1:udKWzYgxTojEKWjV8V+WSxDXJ4NFATAsZjh8iIbsQIg= github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1 h1:UQHMgLO+TxOElx5B5HZ4hJQsoJ/PvUvKRhJHDQXO8P8=
github.com/Azure/go-ansiterm v0.0.0-20250102033503-faa5f7b0171c/go.mod h1:xomTg63KZ2rFqZQzSB4Vz2SUXa1BpHTVz9L5PTmPC4E= github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1/go.mod h1:xomTg63KZ2rFqZQzSB4Vz2SUXa1BpHTVz9L5PTmPC4E=
github.com/BurntSushi/toml v0.3.1 h1:WXkYYl6Yr3qBf1K79EBnL4mak0OimBfB0XUf9Vl28OQ=
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
github.com/Masterminds/semver/v3 v3.2.1 h1:RN9w6+7QoMeJVGyfmbcgs28Br8cvmnucEXnY0rYXWg0= github.com/Masterminds/semver/v3 v3.2.1 h1:RN9w6+7QoMeJVGyfmbcgs28Br8cvmnucEXnY0rYXWg0=
github.com/Masterminds/semver/v3 v3.2.1/go.mod h1:qvl/7zhW3nngYb5+80sSMF+FG2BjYrf8m9wsX0PNOMQ= github.com/Masterminds/semver/v3 v3.2.1/go.mod h1:qvl/7zhW3nngYb5+80sSMF+FG2BjYrf8m9wsX0PNOMQ=
@@ -12,6 +13,8 @@ github.com/Microsoft/go-winio v0.6.2/go.mod h1:yd8OoFMLzJbo9gZq8j5qaps8bJ9aShtEA
github.com/Microsoft/hcsshim v0.12.9 h1:2zJy5KA+l0loz1HzEGqyNnjd3fyZA31ZBCGKacp6lLg= github.com/Microsoft/hcsshim v0.12.9 h1:2zJy5KA+l0loz1HzEGqyNnjd3fyZA31ZBCGKacp6lLg=
github.com/Microsoft/hcsshim v0.12.9/go.mod h1:fJ0gkFAna6ukt0bLdKB8djt4XIJhF/vEPuoIWYVvZ8Y= github.com/Microsoft/hcsshim v0.12.9/go.mod h1:fJ0gkFAna6ukt0bLdKB8djt4XIJhF/vEPuoIWYVvZ8Y=
github.com/Shopify/logrus-bugsnag v0.0.0-20170309145241-6dbc35f2c30d/go.mod h1:HI8ITrYtUY+O+ZhtlqUnD8+KwNPOyugEhfP9fdUIaEQ= github.com/Shopify/logrus-bugsnag v0.0.0-20170309145241-6dbc35f2c30d/go.mod h1:HI8ITrYtUY+O+ZhtlqUnD8+KwNPOyugEhfP9fdUIaEQ=
github.com/Shopify/logrus-bugsnag v0.0.0-20171204204709-577dee27f20d h1:UrqY+r/OJnIp5u0s1SbQ8dVfLCZJsnvazdBP5hS4iRs=
github.com/Shopify/logrus-bugsnag v0.0.0-20171204204709-577dee27f20d/go.mod h1:HI8ITrYtUY+O+ZhtlqUnD8+KwNPOyugEhfP9fdUIaEQ=
github.com/agext/levenshtein v1.2.3 h1:YB2fHEn0UJagG8T1rrWknE3ZQzWM06O8AMAatNn7lmo= github.com/agext/levenshtein v1.2.3 h1:YB2fHEn0UJagG8T1rrWknE3ZQzWM06O8AMAatNn7lmo=
github.com/agext/levenshtein v1.2.3/go.mod h1:JEDfjyjHDjOF/1e4FlBE/PkbqA9OfWu2ki2W0IB5558= github.com/agext/levenshtein v1.2.3/go.mod h1:JEDfjyjHDjOF/1e4FlBE/PkbqA9OfWu2ki2W0IB5558=
github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
@@ -60,26 +63,30 @@ github.com/bitly/go-hostpool v0.1.0/go.mod h1:4gOCgp6+NZnVqlKyZ/iBZFTAJKembaVENU
github.com/bitly/go-simplejson v0.5.0/go.mod h1:cXHtHw4XUPsvGaxgjIAn8PhEWG9NfngEKAMDJEczWVA= github.com/bitly/go-simplejson v0.5.0/go.mod h1:cXHtHw4XUPsvGaxgjIAn8PhEWG9NfngEKAMDJEczWVA=
github.com/bmatcuk/doublestar v1.1.5/go.mod h1:wiQtGV+rzVYxB7WIlirSN++5HPtPlXEo9MEoZQC/PmE= github.com/bmatcuk/doublestar v1.1.5/go.mod h1:wiQtGV+rzVYxB7WIlirSN++5HPtPlXEo9MEoZQC/PmE=
github.com/bmizerany/assert v0.0.0-20160611221934-b7ed37b82869/go.mod h1:Ekp36dRnpXw/yCqJaO+ZrUyxD+3VXMFFr56k5XYrpB4= github.com/bmizerany/assert v0.0.0-20160611221934-b7ed37b82869/go.mod h1:Ekp36dRnpXw/yCqJaO+ZrUyxD+3VXMFFr56k5XYrpB4=
github.com/bugsnag/bugsnag-go v1.0.5-0.20150529004307-13fd6b8acda0 h1:s7+5BfS4WFJoVF9pnB8kBk03S7pZXRdKamnV0FOl5Sc=
github.com/bugsnag/bugsnag-go v1.0.5-0.20150529004307-13fd6b8acda0/go.mod h1:2oa8nejYd4cQ/b0hMIopN0lCRxU0bueqREvZLWFrtK8= github.com/bugsnag/bugsnag-go v1.0.5-0.20150529004307-13fd6b8acda0/go.mod h1:2oa8nejYd4cQ/b0hMIopN0lCRxU0bueqREvZLWFrtK8=
github.com/bugsnag/osext v0.0.0-20130617224835-0dd3f918b21b h1:otBG+dV+YK+Soembjv71DPz3uX/V/6MMlSyD9JBQ6kQ=
github.com/bugsnag/osext v0.0.0-20130617224835-0dd3f918b21b/go.mod h1:obH5gd0BsqsP2LwDJ9aOkm/6J86V6lyAXCoQWGw3K50= github.com/bugsnag/osext v0.0.0-20130617224835-0dd3f918b21b/go.mod h1:obH5gd0BsqsP2LwDJ9aOkm/6J86V6lyAXCoQWGw3K50=
github.com/bugsnag/panicwrap v0.0.0-20151223152923-e2c28503fcd0 h1:nvj0OLI3YqYXer/kZD8Ri1aaunCxIEsOst1BVJswV0o=
github.com/bugsnag/panicwrap v0.0.0-20151223152923-e2c28503fcd0/go.mod h1:D/8v3kj0zr8ZAKg1AQ6crr+5VwKN5eIywRkfhyM/+dE= github.com/bugsnag/panicwrap v0.0.0-20151223152923-e2c28503fcd0/go.mod h1:D/8v3kj0zr8ZAKg1AQ6crr+5VwKN5eIywRkfhyM/+dE=
github.com/cenkalti/backoff/v4 v4.3.0 h1:MyRJ/UdXutAwSAT+s3wNd7MfTIcy71VQueUuFK343L8= github.com/cenkalti/backoff/v4 v4.3.0 h1:MyRJ/UdXutAwSAT+s3wNd7MfTIcy71VQueUuFK343L8=
github.com/cenkalti/backoff/v4 v4.3.0/go.mod h1:Y3VNntkOUPxTVeUxJ/G5vcM//AlwfmyYozVcomhLiZE= github.com/cenkalti/backoff/v4 v4.3.0/go.mod h1:Y3VNntkOUPxTVeUxJ/G5vcM//AlwfmyYozVcomhLiZE=
github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs= github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs=
github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
github.com/cloudflare/cfssl v0.0.0-20180223231731-4e2dcbde5004 h1:lkAMpLVBDaj17e85keuznYcH5rqI438v41pKcBl4ZxQ=
github.com/cloudflare/cfssl v0.0.0-20180223231731-4e2dcbde5004/go.mod h1:yMWuSON2oQp+43nFtAV/uvKQIFpSPerB57DCt9t8sSA= github.com/cloudflare/cfssl v0.0.0-20180223231731-4e2dcbde5004/go.mod h1:yMWuSON2oQp+43nFtAV/uvKQIFpSPerB57DCt9t8sSA=
github.com/codahale/rfc6979 v0.0.0-20141003034818-6a90f24967eb h1:EDmT6Q9Zs+SbUoc7Ik9EfrFqcylYqgPZ9ANSbTAntnE= github.com/codahale/rfc6979 v0.0.0-20141003034818-6a90f24967eb h1:EDmT6Q9Zs+SbUoc7Ik9EfrFqcylYqgPZ9ANSbTAntnE=
github.com/codahale/rfc6979 v0.0.0-20141003034818-6a90f24967eb/go.mod h1:ZjrT6AXHbDs86ZSdt/osfBi5qfexBrKUdONk989Wnk4= github.com/codahale/rfc6979 v0.0.0-20141003034818-6a90f24967eb/go.mod h1:ZjrT6AXHbDs86ZSdt/osfBi5qfexBrKUdONk989Wnk4=
github.com/compose-spec/compose-go/v2 v2.4.9 h1:2K4TDw+1ba2idiR6empXHKRXvWYpnvAKoNQy93/sSOs= github.com/compose-spec/compose-go/v2 v2.4.7 h1:WNpz5bIbKG+G+w9pfu72B1ZXr+Og9jez8TMEo8ecXPk=
github.com/compose-spec/compose-go/v2 v2.4.9/go.mod h1:6k5l/0TxCg0/2uLEhRVEsoBWBprS2uvZi32J7xub3lo= github.com/compose-spec/compose-go/v2 v2.4.7/go.mod h1:lFN0DrMxIncJGYAXTfWuajfwj5haBJqrBkarHcnjJKc=
github.com/containerd/cgroups/v3 v3.0.5 h1:44na7Ud+VwyE7LIoJ8JTNQOa549a8543BmzaJHo6Bzo= github.com/containerd/cgroups/v3 v3.0.3 h1:S5ByHZ/h9PMe5IOQoN7E+nMc2UcLEM/V48DGDJ9kip0=
github.com/containerd/cgroups/v3 v3.0.5/go.mod h1:SA5DLYnXO8pTGYiAHXz94qvLQTKfVM5GEVisn4jpins= github.com/containerd/cgroups/v3 v3.0.3/go.mod h1:8HBe7V3aWGLFPd/k03swSIsGjZhHI2WzJmticMgVuz0=
github.com/containerd/console v1.0.4 h1:F2g4+oChYvBTsASRTz8NP6iIAi97J3TtSAsLbIFn4ro= github.com/containerd/console v1.0.4 h1:F2g4+oChYvBTsASRTz8NP6iIAi97J3TtSAsLbIFn4ro=
github.com/containerd/console v1.0.4/go.mod h1:YynlIjWYF8myEu6sdkwKIvGQq+cOckRm6So2avqoYAk= github.com/containerd/console v1.0.4/go.mod h1:YynlIjWYF8myEu6sdkwKIvGQq+cOckRm6So2avqoYAk=
github.com/containerd/containerd/api v1.8.0 h1:hVTNJKR8fMc/2Tiw60ZRijntNMd1U+JVMyTRdsD2bS0= github.com/containerd/containerd/api v1.8.0 h1:hVTNJKR8fMc/2Tiw60ZRijntNMd1U+JVMyTRdsD2bS0=
github.com/containerd/containerd/api v1.8.0/go.mod h1:dFv4lt6S20wTu/hMcP4350RL87qPWLVa/OHOwmmdnYc= github.com/containerd/containerd/api v1.8.0/go.mod h1:dFv4lt6S20wTu/hMcP4350RL87qPWLVa/OHOwmmdnYc=
github.com/containerd/containerd/v2 v2.0.4 h1:+r7yJMwhTfMm3CDyiBjMBQO8a9CTBxL2Bg/JtqtIwB8= github.com/containerd/containerd/v2 v2.0.2 h1:GmH/tRBlTvrXOLwSpWE2vNAm8+MqI6nmxKpKBNKY8Wc=
github.com/containerd/containerd/v2 v2.0.4/go.mod h1:5j9QUUaV/cy9ZeAx4S+8n9ffpf+iYnEj4jiExgcbuLY= github.com/containerd/containerd/v2 v2.0.2/go.mod h1:wIqEvQ/6cyPFUGJ5yMFanspPabMLor+bF865OHvNTTI=
github.com/containerd/continuity v0.4.5 h1:ZRoN1sXq9u7V6QoHMcVWGhOwDFqZ4B9i5H6un1Wh0x4= github.com/containerd/continuity v0.4.5 h1:ZRoN1sXq9u7V6QoHMcVWGhOwDFqZ4B9i5H6un1Wh0x4=
github.com/containerd/continuity v0.4.5/go.mod h1:/lNJvtJKUQStBzpVQ1+rasXO1LAWtUQssk28EZvJ3nE= github.com/containerd/continuity v0.4.5/go.mod h1:/lNJvtJKUQStBzpVQ1+rasXO1LAWtUQssk28EZvJ3nE=
github.com/containerd/errdefs v1.0.0 h1:tg5yIfIlQIrxYtu9ajqY42W3lpS19XqdxRQeEwYG8PI= github.com/containerd/errdefs v1.0.0 h1:tg5yIfIlQIrxYtu9ajqY42W3lpS19XqdxRQeEwYG8PI=
@@ -103,6 +110,7 @@ github.com/containerd/ttrpc v1.2.7 h1:qIrroQvuOL9HQ1X6KHe2ohc7p+HP/0VE6XPU7elJRq
github.com/containerd/ttrpc v1.2.7/go.mod h1:YCXHsb32f+Sq5/72xHubdiJRQY9inL4a4ZQrAbN1q9o= github.com/containerd/ttrpc v1.2.7/go.mod h1:YCXHsb32f+Sq5/72xHubdiJRQY9inL4a4ZQrAbN1q9o=
github.com/containerd/typeurl/v2 v2.2.3 h1:yNA/94zxWdvYACdYO8zofhrTVuQY73fFU1y++dYSw40= github.com/containerd/typeurl/v2 v2.2.3 h1:yNA/94zxWdvYACdYO8zofhrTVuQY73fFU1y++dYSw40=
github.com/containerd/typeurl/v2 v2.2.3/go.mod h1:95ljDnPfD3bAbDJRugOiShd/DlAAsxGtUBhJxIn7SCk= github.com/containerd/typeurl/v2 v2.2.3/go.mod h1:95ljDnPfD3bAbDJRugOiShd/DlAAsxGtUBhJxIn7SCk=
github.com/cpuguy83/go-md2man/v2 v2.0.4/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
github.com/cpuguy83/go-md2man/v2 v2.0.6 h1:XJtiaUW6dEEqVuZiMTn1ldk455QWwEIsMIJlo5vtkx0= github.com/cpuguy83/go-md2man/v2 v2.0.6 h1:XJtiaUW6dEEqVuZiMTn1ldk455QWwEIsMIJlo5vtkx0=
github.com/cpuguy83/go-md2man/v2 v2.0.6/go.mod h1:oOW0eioCTA6cOiMLiUPZOpcVxMig6NIQQ7OS05n1F4g= github.com/cpuguy83/go-md2man/v2 v2.0.6/go.mod h1:oOW0eioCTA6cOiMLiUPZOpcVxMig6NIQQ7OS05n1F4g=
github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
@@ -114,17 +122,17 @@ github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSs
github.com/denisenkom/go-mssqldb v0.0.0-20191128021309-1d7a30a10f73/go.mod h1:xbL0rPBG9cCiLr28tMa8zpbdarY27NDyej4t/EjAShU= github.com/denisenkom/go-mssqldb v0.0.0-20191128021309-1d7a30a10f73/go.mod h1:xbL0rPBG9cCiLr28tMa8zpbdarY27NDyej4t/EjAShU=
github.com/distribution/reference v0.6.0 h1:0IXCQ5g4/QMHHkarYzh5l+u8T3t73zM5QvfrDyIgxBk= github.com/distribution/reference v0.6.0 h1:0IXCQ5g4/QMHHkarYzh5l+u8T3t73zM5QvfrDyIgxBk=
github.com/distribution/reference v0.6.0/go.mod h1:BbU0aIcezP1/5jX/8MP0YiH4SdvB5Y4f/wlDRiLyi3E= github.com/distribution/reference v0.6.0/go.mod h1:BbU0aIcezP1/5jX/8MP0YiH4SdvB5Y4f/wlDRiLyi3E=
github.com/docker/cli v28.0.4+incompatible h1:pBJSJeNd9QeIWPjRcV91RVJihd/TXB77q1ef64XEu4A= github.com/docker/cli v27.5.0+incompatible h1:aMphQkcGtpHixwwhAXJT1rrK/detk2JIvDaFkLctbGM=
github.com/docker/cli v28.0.4+incompatible/go.mod h1:JLrzqnKDaYBop7H2jaqPtU4hHvMKP+vjCwu2uszcLI8= github.com/docker/cli v27.5.0+incompatible/go.mod h1:JLrzqnKDaYBop7H2jaqPtU4hHvMKP+vjCwu2uszcLI8=
github.com/docker/cli-docs-tool v0.9.0 h1:CVwQbE+ZziwlPqrJ7LRyUF6GvCA+6gj7MTCsayaK9t0= github.com/docker/cli-docs-tool v0.9.0 h1:CVwQbE+ZziwlPqrJ7LRyUF6GvCA+6gj7MTCsayaK9t0=
github.com/docker/cli-docs-tool v0.9.0/go.mod h1:ClrwlNW+UioiRyH9GiAOe1o3J/TsY3Tr1ipoypjAUtc= github.com/docker/cli-docs-tool v0.9.0/go.mod h1:ClrwlNW+UioiRyH9GiAOe1o3J/TsY3Tr1ipoypjAUtc=
github.com/docker/distribution v2.7.1+incompatible/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w= github.com/docker/distribution v2.7.1+incompatible/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w=
github.com/docker/distribution v2.8.3+incompatible h1:AtKxIZ36LoNK51+Z6RpzLpddBirtxJnzDrHLEKxTAYk= github.com/docker/distribution v2.8.3+incompatible h1:AtKxIZ36LoNK51+Z6RpzLpddBirtxJnzDrHLEKxTAYk=
github.com/docker/distribution v2.8.3+incompatible/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w= github.com/docker/distribution v2.8.3+incompatible/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w=
github.com/docker/docker v28.0.4+incompatible h1:JNNkBctYKurkw6FrHfKqY0nKIDf5nrbxjVBtS+cdcok= github.com/docker/docker v27.5.0+incompatible h1:um++2NcQtGRTz5eEgO6aJimo6/JxrTXC941hd05JO6U=
github.com/docker/docker v28.0.4+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= github.com/docker/docker v27.5.0+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk=
github.com/docker/docker-credential-helpers v0.9.3 h1:gAm/VtF9wgqJMoxzT3Gj5p4AqIjCBS4wrsOh9yRqcz8= github.com/docker/docker-credential-helpers v0.8.2 h1:bX3YxiGzFP5sOXWc3bTPEXdEaZSeVMrFgOr3T+zrFAo=
github.com/docker/docker-credential-helpers v0.9.3/go.mod h1:x+4Gbw9aGmChi3qTLZj8Dfn0TD20M/fuWy0E5+WDeCo= github.com/docker/docker-credential-helpers v0.8.2/go.mod h1:P3ci7E3lwkZg6XiHdRKft1KckHiO9a2rNtyFbZ/ry9M=
github.com/docker/go v1.5.1-1.0.20160303222718-d30aec9fd63c h1:lzqkGL9b3znc+ZUgi7FlLnqjQhcXxkNM/quxIjBVMD0= github.com/docker/go v1.5.1-1.0.20160303222718-d30aec9fd63c h1:lzqkGL9b3znc+ZUgi7FlLnqjQhcXxkNM/quxIjBVMD0=
github.com/docker/go v1.5.1-1.0.20160303222718-d30aec9fd63c/go.mod h1:CADgU4DSXK5QUlFslkQu2yW2TKzFZcXq/leZfM0UH5Q= github.com/docker/go v1.5.1-1.0.20160303222718-d30aec9fd63c/go.mod h1:CADgU4DSXK5QUlFslkQu2yW2TKzFZcXq/leZfM0UH5Q=
github.com/docker/go-connections v0.4.0/go.mod h1:Gbd7IOopHjR8Iph03tsViu4nIes5XhDvyHbTtUxmeec= github.com/docker/go-connections v0.4.0/go.mod h1:Gbd7IOopHjR8Iph03tsViu4nIes5XhDvyHbTtUxmeec=
@@ -144,6 +152,8 @@ github.com/erikstmartin/go-testdb v0.0.0-20160219214506-8d10e4a1bae5/go.mod h1:a
github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg= github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg=
github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U= github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U=
github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo= github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo=
github.com/fsnotify/fsnotify v1.7.0 h1:8JEhPFa5W2WU7YfeZzPNqzMP6Lwt7L2715Ggo0nosvA=
github.com/fsnotify/fsnotify v1.7.0/go.mod h1:40Bi/Hjc2AVfZrqy+aj+yEI+/bRxZnMJyTJwOpGvigM=
github.com/fvbommel/sortorder v1.0.1 h1:dSnXLt4mJYH25uDDGa3biZNQsozaUWDSWeKJ0qqFfzE= github.com/fvbommel/sortorder v1.0.1 h1:dSnXLt4mJYH25uDDGa3biZNQsozaUWDSWeKJ0qqFfzE=
github.com/fvbommel/sortorder v1.0.1/go.mod h1:uk88iVf1ovNn1iLfgUVU2F9o5eO30ui720w+kxuqRs0= github.com/fvbommel/sortorder v1.0.1/go.mod h1:uk88iVf1ovNn1iLfgUVU2F9o5eO30ui720w+kxuqRs0=
github.com/fxamacker/cbor/v2 v2.7.0 h1:iM5WgngdRBanHcxugY4JySA0nk1wZorNOpTgCMedv5E= github.com/fxamacker/cbor/v2 v2.7.0 h1:iM5WgngdRBanHcxugY4JySA0nk1wZorNOpTgCMedv5E=
@@ -163,6 +173,7 @@ github.com/go-openapi/jsonreference v0.20.2/go.mod h1:Bl1zwGIM8/wsvqjsOQLJ/SH+En
github.com/go-openapi/swag v0.22.3/go.mod h1:UzaqsxGiab7freDnrUUra0MwWfN/q7tE4j+VcZ0yl14= github.com/go-openapi/swag v0.22.3/go.mod h1:UzaqsxGiab7freDnrUUra0MwWfN/q7tE4j+VcZ0yl14=
github.com/go-openapi/swag v0.22.4 h1:QLMzNJnMGPRNDCbySlcj1x01tzU8/9LTTL9hZZZogBU= github.com/go-openapi/swag v0.22.4 h1:QLMzNJnMGPRNDCbySlcj1x01tzU8/9LTTL9hZZZogBU=
github.com/go-openapi/swag v0.22.4/go.mod h1:UzaqsxGiab7freDnrUUra0MwWfN/q7tE4j+VcZ0yl14= github.com/go-openapi/swag v0.22.4/go.mod h1:UzaqsxGiab7freDnrUUra0MwWfN/q7tE4j+VcZ0yl14=
github.com/go-sql-driver/mysql v1.3.0 h1:pgwjLi/dvffoP9aabwkT3AKpXQM93QARkjFhDDqC1UE=
github.com/go-sql-driver/mysql v1.3.0/go.mod h1:zAC/RDZ24gD3HViQzih4MyKcchzm+sOG5ZlKdlhCg5w= github.com/go-sql-driver/mysql v1.3.0/go.mod h1:zAC/RDZ24gD3HViQzih4MyKcchzm+sOG5ZlKdlhCg5w=
github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY=
github.com/go-task/slim-sprig/v3 v3.0.0 h1:sUs3vkvUymDpBKi3qH1YSqBQk9+9D/8M2mN1vB6EwHI= github.com/go-task/slim-sprig/v3 v3.0.0 h1:sUs3vkvUymDpBKi3qH1YSqBQk9+9D/8M2mN1vB6EwHI=
@@ -187,14 +198,15 @@ github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5y
github.com/golang/protobuf v1.3.4/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= github.com/golang/protobuf v1.3.4/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw=
github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek= github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek=
github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps= github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps=
github.com/google/certificate-transparency-go v1.0.10-0.20180222191210-5ab67e519c93 h1:jc2UWq7CbdszqeH6qu1ougXMIUBfSy8Pbh/anURYbGI=
github.com/google/certificate-transparency-go v1.0.10-0.20180222191210-5ab67e519c93/go.mod h1:QeJfpSbVSfYc7RgB3gJFj9cbuQMMchQxrWXz8Ruopmg= github.com/google/certificate-transparency-go v1.0.10-0.20180222191210-5ab67e519c93/go.mod h1:QeJfpSbVSfYc7RgB3gJFj9cbuQMMchQxrWXz8Ruopmg=
github.com/google/gnostic-models v0.6.8 h1:yo/ABAfM5IMRsS1VnXjTBvUb61tFIHozhlYvRgGre9I= github.com/google/gnostic-models v0.6.8 h1:yo/ABAfM5IMRsS1VnXjTBvUb61tFIHozhlYvRgGre9I=
github.com/google/gnostic-models v0.6.8/go.mod h1:5n7qKqH0f5wFt+aWF8CW6pZLLNOfYuF5OpfBSENuI8U= github.com/google/gnostic-models v0.6.8/go.mod h1:5n7qKqH0f5wFt+aWF8CW6pZLLNOfYuF5OpfBSENuI8U=
github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8= github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI=
github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU= github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
github.com/google/gofuzz v1.2.0 h1:xRy4A+RhZaiKjJ1bPfwQ8sedCA+YS2YcCHW6ec7JMi0= github.com/google/gofuzz v1.2.0 h1:xRy4A+RhZaiKjJ1bPfwQ8sedCA+YS2YcCHW6ec7JMi0=
github.com/google/gofuzz v1.2.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= github.com/google/gofuzz v1.2.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
@@ -212,6 +224,7 @@ github.com/gorilla/websocket v1.5.0 h1:PPwGk2jz7EePpoHN/+ClbZu8SPxiqlu12wZP/3sWm
github.com/gorilla/websocket v1.5.0/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= github.com/gorilla/websocket v1.5.0/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
github.com/grpc-ecosystem/grpc-gateway/v2 v2.22.0 h1:asbCHRVmodnJTuQ3qamDwqVOIjwqUPTYmYuemVOx+Ys= github.com/grpc-ecosystem/grpc-gateway/v2 v2.22.0 h1:asbCHRVmodnJTuQ3qamDwqVOIjwqUPTYmYuemVOx+Ys=
github.com/grpc-ecosystem/grpc-gateway/v2 v2.22.0/go.mod h1:ggCgvZ2r7uOoQjOyu2Y1NhHmEPPzzuhWgcza5M1Ji1I= github.com/grpc-ecosystem/grpc-gateway/v2 v2.22.0/go.mod h1:ggCgvZ2r7uOoQjOyu2Y1NhHmEPPzzuhWgcza5M1Ji1I=
github.com/hailocab/go-hostpool v0.0.0-20160125115350-e80d13ce29ed h1:5upAirOpQc1Q53c0bnx2ufif5kANL7bfZWcc6VJWJd8=
github.com/hailocab/go-hostpool v0.0.0-20160125115350-e80d13ce29ed/go.mod h1:tMWxXQ9wFIaZeTI9F+hmhFiGpFmhOHzyShyFUhRm0H4= github.com/hailocab/go-hostpool v0.0.0-20160125115350-e80d13ce29ed/go.mod h1:tMWxXQ9wFIaZeTI9F+hmhFiGpFmhOHzyShyFUhRm0H4=
github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4=
github.com/hashicorp/errwrap v1.1.0 h1:OxrOeh75EUXMY8TBjag2fzXGZ40LB6IKw45YeGUDY2I= github.com/hashicorp/errwrap v1.1.0 h1:OxrOeh75EUXMY8TBjag2fzXGZ40LB6IKw45YeGUDY2I=
@@ -232,7 +245,9 @@ github.com/in-toto/in-toto-golang v0.5.0/go.mod h1:/Rq0IZHLV7Ku5gielPT4wPHJfH1Gd
github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8= github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8=
github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8= github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8=
github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw= github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw=
github.com/jinzhu/gorm v0.0.0-20170222002820-5409931a1bb8 h1:CZkYfurY6KGhVtlalI4QwQ6T0Cu6iuY3e0x5RLu96WE=
github.com/jinzhu/gorm v0.0.0-20170222002820-5409931a1bb8/go.mod h1:Vla75njaFJ8clLU1W44h34PjIkijhjHIYnZxMqCdxqo= github.com/jinzhu/gorm v0.0.0-20170222002820-5409931a1bb8/go.mod h1:Vla75njaFJ8clLU1W44h34PjIkijhjHIYnZxMqCdxqo=
github.com/jinzhu/inflection v0.0.0-20170102125226-1c35d901db3d h1:jRQLvyVGL+iVtDElaEIDdKwpPqUIZJfzkNLV34htpEc=
github.com/jinzhu/inflection v0.0.0-20170102125226-1c35d901db3d/go.mod h1:h+uFLlag+Qp1Va5pdKtLDYj+kHp5pxUVkryuEj+Srlc= github.com/jinzhu/inflection v0.0.0-20170102125226-1c35d901db3d/go.mod h1:h+uFLlag+Qp1Va5pdKtLDYj+kHp5pxUVkryuEj+Srlc=
github.com/jinzhu/now v1.1.1/go.mod h1:d3SSVoowX0Lcu0IBviAWJpolVfI5UJVZZ7cO71lE/z8= github.com/jinzhu/now v1.1.1/go.mod h1:d3SSVoowX0Lcu0IBviAWJpolVfI5UJVZZ7cO71lE/z8=
github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY= github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY=
@@ -245,8 +260,8 @@ github.com/juju/loggo v0.0.0-20190526231331-6e530bcce5d8/go.mod h1:vgyd7OREkbtVE
github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w= github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w=
github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8=
github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck=
github.com/klauspost/compress v1.18.0 h1:c/Cqfb0r+Yi+JtIEq73FWXVkRonBlf0CRNYc8Zttxdo= github.com/klauspost/compress v1.17.11 h1:In6xLpyWOi1+C7tXUUWv2ot1QvBjxevKAaI6IXrJmUc=
github.com/klauspost/compress v1.18.0/go.mod h1:2Pp+KzxcywXVXMr50+X0Q/Lsb43OQHYWRCY2AiWywWQ= github.com/klauspost/compress v1.17.11/go.mod h1:pMDklpSncoRMuLFrf1W9Ss9KT+0rH90U12bZKk7uwG0=
github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
github.com/konsorten/go-windows-terminal-sequences v1.0.2/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= github.com/konsorten/go-windows-terminal-sequences v1.0.2/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc= github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc=
@@ -261,6 +276,7 @@ github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc= github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc=
github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw= github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw=
github.com/lib/pq v0.0.0-20150723085316-0dad96c0b94f/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= github.com/lib/pq v0.0.0-20150723085316-0dad96c0b94f/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=
github.com/magiconair/properties v1.5.3 h1:C8fxWnhYyME3n0klPOhVM7PtYUB3eV1W3DeFmN3j53Y=
github.com/magiconair/properties v1.5.3/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ= github.com/magiconair/properties v1.5.3/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ=
github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0= github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0=
github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc= github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc=
@@ -279,8 +295,10 @@ github.com/mitchellh/go-wordwrap v0.0.0-20150314170334-ad45545899c7/go.mod h1:ZX
github.com/mitchellh/hashstructure/v2 v2.0.2 h1:vGKWl0YJqUNxE8d+h8f6NJLcCJrgbhC4NcD46KavDd4= github.com/mitchellh/hashstructure/v2 v2.0.2 h1:vGKWl0YJqUNxE8d+h8f6NJLcCJrgbhC4NcD46KavDd4=
github.com/mitchellh/hashstructure/v2 v2.0.2/go.mod h1:MG3aRVU/N29oo/V/IhBX8GR/zz4kQkprJgF2EVszyDE= github.com/mitchellh/hashstructure/v2 v2.0.2/go.mod h1:MG3aRVU/N29oo/V/IhBX8GR/zz4kQkprJgF2EVszyDE=
github.com/mitchellh/mapstructure v0.0.0-20150613213606-2caf8efc9366/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= github.com/mitchellh/mapstructure v0.0.0-20150613213606-2caf8efc9366/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y=
github.com/moby/buildkit v0.21.0-rc1 h1:QWTyHpHUtsyUMH0CH7QStisI/FmS9njRC1FK4vVYeaE= github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY=
github.com/moby/buildkit v0.21.0-rc1/go.mod h1:coiVDxJmP1PD+79HAnTJvBMetLTdCws8gpWiYX2vcH8= github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo=
github.com/moby/buildkit v0.19.0 h1:w9G1p7sArvCGNkpWstAqJfRQTXBKukMyMK1bsah1HNo=
github.com/moby/buildkit v0.19.0/go.mod h1:WiHBFTgWV8eB1AmPxIWsAlKjUACAwm3X/14xOV4VWew=
github.com/moby/docker-image-spec v1.3.1 h1:jMKff3w6PgbfSa69GfNg+zN/XLhfXJGnEx3Nl2EsFP0= github.com/moby/docker-image-spec v1.3.1 h1:jMKff3w6PgbfSa69GfNg+zN/XLhfXJGnEx3Nl2EsFP0=
github.com/moby/docker-image-spec v1.3.1/go.mod h1:eKmb5VW8vQEh/BAr2yvVNvuiJuY6UIocYsFu/DxxRpo= github.com/moby/docker-image-spec v1.3.1/go.mod h1:eKmb5VW8vQEh/BAr2yvVNvuiJuY6UIocYsFu/DxxRpo=
github.com/moby/locker v1.0.1 h1:fOXqR41zeveg4fFODix+1Ch4mj/gT0NE1XJbp/epuBg= github.com/moby/locker v1.0.1 h1:fOXqR41zeveg4fFODix+1Ch4mj/gT0NE1XJbp/epuBg=
@@ -295,12 +313,12 @@ github.com/moby/sys/sequential v0.6.0 h1:qrx7XFUd/5DxtqcoH1h438hF5TmOvzC/lspjy7z
github.com/moby/sys/sequential v0.6.0/go.mod h1:uyv8EUTrca5PnDsdMGXhZe6CCe8U/UiTWd+lL+7b/Ko= github.com/moby/sys/sequential v0.6.0/go.mod h1:uyv8EUTrca5PnDsdMGXhZe6CCe8U/UiTWd+lL+7b/Ko=
github.com/moby/sys/signal v0.7.1 h1:PrQxdvxcGijdo6UXXo/lU/TvHUWyPhj7UOpSo8tuvk0= github.com/moby/sys/signal v0.7.1 h1:PrQxdvxcGijdo6UXXo/lU/TvHUWyPhj7UOpSo8tuvk0=
github.com/moby/sys/signal v0.7.1/go.mod h1:Se1VGehYokAkrSQwL4tDzHvETwUZlnY7S5XtQ50mQp8= github.com/moby/sys/signal v0.7.1/go.mod h1:Se1VGehYokAkrSQwL4tDzHvETwUZlnY7S5XtQ50mQp8=
github.com/moby/sys/user v0.4.0 h1:jhcMKit7SA80hivmFJcbB1vqmw//wU61Zdui2eQXuMs= github.com/moby/sys/user v0.3.0 h1:9ni5DlcW5an3SvRSx4MouotOygvzaXbaSrc/wGDFWPo=
github.com/moby/sys/user v0.4.0/go.mod h1:bG+tYYYJgaMtRKgEmuueC0hJEAZWwtIbZTB+85uoHjs= github.com/moby/sys/user v0.3.0/go.mod h1:bG+tYYYJgaMtRKgEmuueC0hJEAZWwtIbZTB+85uoHjs=
github.com/moby/sys/userns v0.1.0 h1:tVLXkFOxVu9A64/yh59slHVv9ahO9UIev4JZusOLG/g= github.com/moby/sys/userns v0.1.0 h1:tVLXkFOxVu9A64/yh59slHVv9ahO9UIev4JZusOLG/g=
github.com/moby/sys/userns v0.1.0/go.mod h1:IHUYgu/kao6N8YZlp9Cf444ySSvCmDlmzUcYfDHOl28= github.com/moby/sys/userns v0.1.0/go.mod h1:IHUYgu/kao6N8YZlp9Cf444ySSvCmDlmzUcYfDHOl28=
github.com/moby/term v0.5.2 h1:6qk3FJAFDs6i/q3W/pQ97SX192qKfZgGjCQqfCJkgzQ= github.com/moby/term v0.5.0 h1:xt8Q1nalod/v7BqbG21f8mQPqH+xAaC9C3N3wfWbVP0=
github.com/moby/term v0.5.2/go.mod h1:d3djjFCrjnB+fl8NJux+EJzu0msscUP+f8it8hPkFLc= github.com/moby/term v0.5.0/go.mod h1:8FzsFHVUBGZdbDsJw/ot+X+d5HLUbvklYLJ9uGfcI3Y=
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg= github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg=
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
@@ -329,12 +347,15 @@ github.com/opencontainers/go-digest v0.0.0-20170106003457-a6d0ee40d420/go.mod h1
github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U= github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U=
github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM= github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM=
github.com/opencontainers/image-spec v1.0.1/go.mod h1:BtxoFyWECRxE4U/7sNtV5W15zMzWCbyJoFRP3s7yZA0= github.com/opencontainers/image-spec v1.0.1/go.mod h1:BtxoFyWECRxE4U/7sNtV5W15zMzWCbyJoFRP3s7yZA0=
github.com/opencontainers/image-spec v1.1.1 h1:y0fUlFfIZhPF1W537XOLg0/fcx6zcHCJwooC2xJA040= github.com/opencontainers/image-spec v1.1.0 h1:8SG7/vwALn54lVB/0yZ/MMwhFrPYtpEHQb2IpWsCzug=
github.com/opencontainers/image-spec v1.1.1/go.mod h1:qpqAh3Dmcf36wStyyWU+kCeDgrGnAve2nCC8+7h8Q0M= github.com/opencontainers/image-spec v1.1.0/go.mod h1:W4s4sFTMaBeK1BQLXbG4AdM2szdn85PY75RI83NrTrM=
github.com/opencontainers/runtime-spec v1.2.0 h1:z97+pHb3uELt/yiAWD691HNHQIF07bE7dzrbT927iTk= github.com/opencontainers/runtime-spec v1.2.0 h1:z97+pHb3uELt/yiAWD691HNHQIF07bE7dzrbT927iTk=
github.com/opencontainers/runtime-spec v1.2.0/go.mod h1:jwyrGlmzljRJv/Fgzds9SsS/C5hL+LL3ko9hs6T5lQ0= github.com/opencontainers/runtime-spec v1.2.0/go.mod h1:jwyrGlmzljRJv/Fgzds9SsS/C5hL+LL3ko9hs6T5lQ0=
github.com/opencontainers/runtime-tools v0.9.1-0.20221107090550-2e043c6bd626 h1:DmNGcqH3WDbV5k8OJ+esPWbqUOX5rMLR2PMvziDMJi0=
github.com/opencontainers/runtime-tools v0.9.1-0.20221107090550-2e043c6bd626/go.mod h1:BRHJJd0E+cx42OybVYSgUvZmU0B8P9gZuRXlZUP7TKI=
github.com/opencontainers/selinux v1.11.1 h1:nHFvthhM0qY8/m+vfhJylliSshm8G1jJ2jDMcgULaH8= github.com/opencontainers/selinux v1.11.1 h1:nHFvthhM0qY8/m+vfhJylliSshm8G1jJ2jDMcgULaH8=
github.com/opencontainers/selinux v1.11.1/go.mod h1:E5dMC3VPuVvVHDYmi78qvhJp8+M586T4DlDRYpFkyec= github.com/opencontainers/selinux v1.11.1/go.mod h1:E5dMC3VPuVvVHDYmi78qvhJp8+M586T4DlDRYpFkyec=
github.com/opentracing/opentracing-go v1.1.0 h1:pWlfV3Bxv7k65HYwkikxat0+s3pV4bsqf19k25Ur8rU=
github.com/opentracing/opentracing-go v1.1.0/go.mod h1:UkNAQd3GIcIGf0SeVgPpRdFStlNbqXla1AfSYxPUl2o= github.com/opentracing/opentracing-go v1.1.0/go.mod h1:UkNAQd3GIcIGf0SeVgPpRdFStlNbqXla1AfSYxPUl2o=
github.com/pelletier/go-toml v1.9.5 h1:4yBQzkHv+7BHq2PQUZF3Mx0IYxG7LsP222s7Agd3ve8= github.com/pelletier/go-toml v1.9.5 h1:4yBQzkHv+7BHq2PQUZF3Mx0IYxG7LsP222s7Agd3ve8=
github.com/pelletier/go-toml v1.9.5/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c= github.com/pelletier/go-toml v1.9.5/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c=
@@ -389,14 +410,17 @@ github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ
github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ=
github.com/spdx/tools-golang v0.5.3 h1:ialnHeEYUC4+hkm5vJm4qz2x+oEJbS0mAMFrNXdQraY= github.com/spdx/tools-golang v0.5.3 h1:ialnHeEYUC4+hkm5vJm4qz2x+oEJbS0mAMFrNXdQraY=
github.com/spdx/tools-golang v0.5.3/go.mod h1:/ETOahiAo96Ob0/RAIBmFZw6XN0yTnyr/uFZm2NTMhI= github.com/spdx/tools-golang v0.5.3/go.mod h1:/ETOahiAo96Ob0/RAIBmFZw6XN0yTnyr/uFZm2NTMhI=
github.com/spf13/cast v0.0.0-20150508191742-4d07383ffe94 h1:JmfC365KywYwHB946TTiQWEb8kqPY+pybPLoGE9GgVk=
github.com/spf13/cast v0.0.0-20150508191742-4d07383ffe94/go.mod h1:r2rcYCSwa1IExKTDiTfzaxqT2FNHs8hODu4LnUfgKEg= github.com/spf13/cast v0.0.0-20150508191742-4d07383ffe94/go.mod h1:r2rcYCSwa1IExKTDiTfzaxqT2FNHs8hODu4LnUfgKEg=
github.com/spf13/cobra v0.0.1/go.mod h1:1l0Ry5zgKvJasoi3XT1TypsSe7PqH0Sj9dhYf7v3XqQ= github.com/spf13/cobra v0.0.1/go.mod h1:1l0Ry5zgKvJasoi3XT1TypsSe7PqH0Sj9dhYf7v3XqQ=
github.com/spf13/cobra v1.9.1 h1:CXSaggrXdbHK9CF+8ywj8Amf7PBRmPCOJugH954Nnlo= github.com/spf13/cobra v1.8.1 h1:e5/vxKd/rZsfSJMUX1agtjeTDf+qv1/JdBF8gg5k9ZM=
github.com/spf13/cobra v1.9.1/go.mod h1:nDyEzZ8ogv936Cinf6g1RU9MRY64Ir93oCnqb9wxYW0= github.com/spf13/cobra v1.8.1/go.mod h1:wHxEcudfqmLYa8iTfL+OuZPbBZkmvliBWKIezN3kD9Y=
github.com/spf13/jwalterweatherman v0.0.0-20141219030609-3d60171a6431 h1:XTHrT015sxHyJ5FnQ0AeemSspZWaDq7DoTRW0EVsDCE=
github.com/spf13/jwalterweatherman v0.0.0-20141219030609-3d60171a6431/go.mod h1:cQK4TGJAtQXfYWX+Ddv3mKDzgVb68N+wFjFa4jdeBTo= github.com/spf13/jwalterweatherman v0.0.0-20141219030609-3d60171a6431/go.mod h1:cQK4TGJAtQXfYWX+Ddv3mKDzgVb68N+wFjFa4jdeBTo=
github.com/spf13/pflag v1.0.0/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= github.com/spf13/pflag v1.0.0/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4=
github.com/spf13/pflag v1.0.6 h1:jFzHGLGAlb3ruxLB8MhbI6A8+AQX/2eW4qeyNZXNp2o= github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA=
github.com/spf13/pflag v1.0.6/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
github.com/spf13/viper v0.0.0-20150530192845-be5ff3e4840c h1:2EejZtjFjKJGk71ANb+wtFK5EjUzUkEM3R0xnp559xg=
github.com/spf13/viper v0.0.0-20150530192845-be5ff3e4840c/go.mod h1:A8kyI5cUJhb8N+3pkfONlcEcZbueH6nhAm0Fq7SrnBM= github.com/spf13/viper v0.0.0-20150530192845-be5ff3e4840c/go.mod h1:A8kyI5cUJhb8N+3pkfONlcEcZbueH6nhAm0Fq7SrnBM=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
@@ -413,16 +437,16 @@ github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO
github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA= github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA=
github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
github.com/syndtr/gocapability v0.0.0-20200815063812-42c35b437635 h1:kdXcSzyDtseVEc4yCz2qF8ZrQvIDBJLl4S1c3GCXmoI=
github.com/syndtr/gocapability v0.0.0-20200815063812-42c35b437635/go.mod h1:hkRG7XYTFWNJGYcbNJQlaLq0fg1yr4J4t/NcTQtrfww=
github.com/theupdateframework/notary v0.7.0 h1:QyagRZ7wlSpjT5N2qQAh/pN+DVqgekv4DzbAiAiEL3c= github.com/theupdateframework/notary v0.7.0 h1:QyagRZ7wlSpjT5N2qQAh/pN+DVqgekv4DzbAiAiEL3c=
github.com/theupdateframework/notary v0.7.0/go.mod h1:c9DRxcmhHmVLDay4/2fUYdISnHqbFDGRSlXPO0AhYWw= github.com/theupdateframework/notary v0.7.0/go.mod h1:c9DRxcmhHmVLDay4/2fUYdISnHqbFDGRSlXPO0AhYWw=
github.com/tonistiigi/dchapes-mode v0.0.0-20250318174251-73d941a28323 h1:r0p7fK56l8WPequOaR3i9LBqfPtEdXIQbUTzT55iqT4= github.com/tonistiigi/dchapes-mode v0.0.0-20241001053921-ca0759fec205 h1:eUk79E1w8yMtXeHSzjKorxuC8qJOnyXQnLaJehxpJaI=
github.com/tonistiigi/dchapes-mode v0.0.0-20250318174251-73d941a28323/go.mod h1:3Iuxbr0P7D3zUzBMAZB+ois3h/et0shEz0qApgHYGpY= github.com/tonistiigi/dchapes-mode v0.0.0-20241001053921-ca0759fec205/go.mod h1:3Iuxbr0P7D3zUzBMAZB+ois3h/et0shEz0qApgHYGpY=
github.com/tonistiigi/fsutil v0.0.0-20250318190121-d73a4b3b8a7e h1:AiXT0JHwQA52AEOVMsxRytSI9mdJSie5gUp6OQ1R8fU= github.com/tonistiigi/fsutil v0.0.0-20250113203817-b14e27f4135a h1:EfGw4G0x/8qXWgtcZ6KVaPS+wpWOQMaypczzP8ojkMY=
github.com/tonistiigi/fsutil v0.0.0-20250318190121-d73a4b3b8a7e/go.mod h1:BKdcez7BiVtBvIcef90ZPc6ebqIWr4JWD7+EvLm6J98= github.com/tonistiigi/fsutil v0.0.0-20250113203817-b14e27f4135a/go.mod h1:Dl/9oEjK7IqnjAm21Okx/XIxUCFJzvh+XdVHUlBwXTw=
github.com/tonistiigi/go-csvvalue v0.0.0-20240710180619-ddb21b71c0b4 h1:7I5c2Ig/5FgqkYOh/N87NzoyI9U15qUPXhDD8uCupv8= github.com/tonistiigi/go-csvvalue v0.0.0-20240710180619-ddb21b71c0b4 h1:7I5c2Ig/5FgqkYOh/N87NzoyI9U15qUPXhDD8uCupv8=
github.com/tonistiigi/go-csvvalue v0.0.0-20240710180619-ddb21b71c0b4/go.mod h1:278M4p8WsNh3n4a1eqiFcV2FGk7wE5fwUpUom9mK9lE= github.com/tonistiigi/go-csvvalue v0.0.0-20240710180619-ddb21b71c0b4/go.mod h1:278M4p8WsNh3n4a1eqiFcV2FGk7wE5fwUpUom9mK9lE=
github.com/tonistiigi/jaeger-ui-rest v0.0.0-20250408171107-3dd17559e117 h1:XFwyh2JZwR5aiKLXHX2C1n0v5F11dCJpyGL1W/Cpl3U=
github.com/tonistiigi/jaeger-ui-rest v0.0.0-20250408171107-3dd17559e117/go.mod h1:3Ez1Paeg+0Ghu3KwpEGC1HgZ4CHDlg+Ez/5Baeomk54=
github.com/tonistiigi/units v0.0.0-20180711220420-6950e57a87ea h1:SXhTLE6pb6eld/v/cCndK0AMpt1wiVFb/YYmqB3/QG0= github.com/tonistiigi/units v0.0.0-20180711220420-6950e57a87ea h1:SXhTLE6pb6eld/v/cCndK0AMpt1wiVFb/YYmqB3/QG0=
github.com/tonistiigi/units v0.0.0-20180711220420-6950e57a87ea/go.mod h1:WPnis/6cRcDZSUvVmezrxJPkiO87ThFYsoUiMwWNDJk= github.com/tonistiigi/units v0.0.0-20180711220420-6950e57a87ea/go.mod h1:WPnis/6cRcDZSUvVmezrxJPkiO87ThFYsoUiMwWNDJk=
github.com/tonistiigi/vt100 v0.0.0-20240514184818-90bafcd6abab h1:H6aJ0yKQ0gF49Qb2z5hI1UHxSQt4JMyxebFR15KnApw= github.com/tonistiigi/vt100 v0.0.0-20240514184818-90bafcd6abab h1:H6aJ0yKQ0gF49Qb2z5hI1UHxSQt4JMyxebFR15KnApw=
@@ -439,8 +463,6 @@ github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 h1:EzJWgHo
github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415/go.mod h1:GwrjFmJcFw6At/Gs6z4yjiIwzuJ1/+UwLxMQDVQXShQ= github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415/go.mod h1:GwrjFmJcFw6At/Gs6z4yjiIwzuJ1/+UwLxMQDVQXShQ=
github.com/xeipuuv/gojsonschema v1.2.0 h1:LhYJRs+L4fBtjZUfuSZIKGeVu0QRy8e5Xi7D17UxZ74= github.com/xeipuuv/gojsonschema v1.2.0 h1:LhYJRs+L4fBtjZUfuSZIKGeVu0QRy8e5Xi7D17UxZ74=
github.com/xeipuuv/gojsonschema v1.2.0/go.mod h1:anYRn/JVcOK2ZgGU+IjEV4nwlhoK5sQluxsYJ78Id3Y= github.com/xeipuuv/gojsonschema v1.2.0/go.mod h1:anYRn/JVcOK2ZgGU+IjEV4nwlhoK5sQluxsYJ78Id3Y=
github.com/xhit/go-str2duration/v2 v2.1.0 h1:lxklc02Drh6ynqX+DdPyp5pCKLUQpRT8bp8Ydu2Bstc=
github.com/xhit/go-str2duration/v2 v2.1.0/go.mod h1:ohY8p+0f07DiV6Em5LKB0s2YpLtXVyJfNt1+BlmyAsU=
github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
github.com/zclconf/go-cty v1.4.0/go.mod h1:nHzOclRkoj++EU9ZjSrZvRG0BXIWt8c7loYc0qXAFGQ= github.com/zclconf/go-cty v1.4.0/go.mod h1:nHzOclRkoj++EU9ZjSrZvRG0BXIWt8c7loYc0qXAFGQ=
@@ -468,8 +490,6 @@ go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.31.0 h1:FFeLy
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.31.0/go.mod h1:TMu73/k1CP8nBUpDLc71Wj/Kf7ZS9FK5b53VapRsP9o= go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.31.0/go.mod h1:TMu73/k1CP8nBUpDLc71Wj/Kf7ZS9FK5b53VapRsP9o=
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.31.0 h1:lUsI2TYsQw2r1IASwoROaCnjdj2cvC2+Jbxvk6nHnWU= go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.31.0 h1:lUsI2TYsQw2r1IASwoROaCnjdj2cvC2+Jbxvk6nHnWU=
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.31.0/go.mod h1:2HpZxxQurfGxJlJDblybejHB6RX6pmExPNe517hREw4= go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.31.0/go.mod h1:2HpZxxQurfGxJlJDblybejHB6RX6pmExPNe517hREw4=
go.opentelemetry.io/otel/exporters/stdout/stdouttrace v1.31.0 h1:UGZ1QwZWY67Z6BmckTU+9Rxn04m2bD3gD6Mk0OIOCPk=
go.opentelemetry.io/otel/exporters/stdout/stdouttrace v1.31.0/go.mod h1:fcwWuDuaObkkChiDlhEpSq9+X1C0omv+s5mBtToAQ64=
go.opentelemetry.io/otel/metric v1.31.0 h1:FSErL0ATQAmYHUIzSezZibnyVlft1ybhy4ozRPcF2fE= go.opentelemetry.io/otel/metric v1.31.0 h1:FSErL0ATQAmYHUIzSezZibnyVlft1ybhy4ozRPcF2fE=
go.opentelemetry.io/otel/metric v1.31.0/go.mod h1:C3dEloVbLuYoX41KpmAhOqNriGbA+qqH6PQ5E5mUfnY= go.opentelemetry.io/otel/metric v1.31.0/go.mod h1:C3dEloVbLuYoX41KpmAhOqNriGbA+qqH6PQ5E5mUfnY=
go.opentelemetry.io/otel/sdk v1.31.0 h1:xLY3abVHYZ5HSfOg3l2E5LUj2Cwva5Y7yGxnSW9H5Gk= go.opentelemetry.io/otel/sdk v1.31.0 h1:xLY3abVHYZ5HSfOg3l2E5LUj2Cwva5Y7yGxnSW9H5Gk=
@@ -490,14 +510,14 @@ golang.org/x/crypto v0.0.0-20200302210943-78000ba7a073/go.mod h1:LzIPMQfyMNhhGPh
golang.org/x/crypto v0.0.0-20200422194213-44a606286825/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20200422194213-44a606286825/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/crypto v0.0.0-20201117144127-c1f2f97bffc9/go.mod h1:jdWPYTVW3xRLrWPugEBEK3UY2ZEsg3UU495nc5E+M+I= golang.org/x/crypto v0.0.0-20201117144127-c1f2f97bffc9/go.mod h1:jdWPYTVW3xRLrWPugEBEK3UY2ZEsg3UU495nc5E+M+I=
golang.org/x/crypto v0.37.0 h1:kJNSjF/Xp7kU0iB2Z+9viTPMW4EqqsrywMXLJOOsXSE= golang.org/x/crypto v0.31.0 h1:ihbySMvVjLAeSH1IbfcRTkD/iNscyz8rGzjF/E5hV6U=
golang.org/x/crypto v0.37.0/go.mod h1:vg+k43peMZ0pUMhYmVAWysMK35e6ioLh3wB8ZCAfbVc= golang.org/x/crypto v0.31.0/go.mod h1:kDsLvtWBEx7MV9tJOj9bnXsPbxwJQ6csT/x4KIN4Ssk=
golang.org/x/exp v0.0.0-20250408133849-7e4ce0ab07d0 h1:R84qjqJb5nVJMxqWYb3np9L5ZsaDtB+a39EqjV0JSUM= golang.org/x/exp v0.0.0-20240909161429-701f63a606c0 h1:e66Fs6Z+fZTbFBAxKfP3PALWBtpfqks2bwGcexMxgtk=
golang.org/x/exp v0.0.0-20250408133849-7e4ce0ab07d0/go.mod h1:S9Xr4PYopiDyqSyp5NjCrhFrqg6A5zA2E/iPHPhqnS8= golang.org/x/exp v0.0.0-20240909161429-701f63a606c0/go.mod h1:2TbTHSBQa924w8M6Xs1QcRcFwyucIwBGpK1p2f1YFFY=
golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/mod v0.24.0 h1:ZfthKaKaT4NrhGVZHO1/WDTwGES4De8KtWO0SIbNJMU= golang.org/x/mod v0.21.0 h1:vvrHzRwRfVKSiLrG+d4FMl/Qi4ukBCE6kZlTUkDYRT0=
golang.org/x/mod v0.24.0/go.mod h1:IXM97Txy2VM4PJ3gI61r1YEk/gAj6zAHN3AdZt6S9Ww= golang.org/x/mod v0.21.0/go.mod h1:6SkKJ3Xj0I0BrPOZoBy3bdMptDDU9oJrpohJ3eWZ1fY=
golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
@@ -505,8 +525,8 @@ golang.org/x/net v0.0.0-20190613194153-d28f0bde5980/go.mod h1:z5CRVTTTmAJ677TzLL
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU=
golang.org/x/net v0.39.0 h1:ZCu7HMWDxpXpaiKdhzIfaltL9Lp31x/3fCP11bc6/fY= golang.org/x/net v0.33.0 h1:74SYHlV8BIgHIFC/LrYkOGIwL19eTYXQ5wc6TBuO36I=
golang.org/x/net v0.39.0/go.mod h1:X7NRbYVEA+ewNkCNyJ513WmMdQ3BineSwVtN2zD/d+E= golang.org/x/net v0.33.0/go.mod h1:HXLR5J+9DxmrqMwG9qjGCxZ+zKXxBru04zlTvWlWuN4=
golang.org/x/oauth2 v0.23.0 h1:PbgcYx2W7i4LvjJWEbf0ngHV6qJYr86PkAV3bXdLEbs= golang.org/x/oauth2 v0.23.0 h1:PbgcYx2W7i4LvjJWEbf0ngHV6qJYr86PkAV3bXdLEbs=
golang.org/x/oauth2 v0.23.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI= golang.org/x/oauth2 v0.23.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI=
golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
@@ -515,8 +535,8 @@ golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJ
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.13.0 h1:AauUjRAJ9OSnvULf/ARrrVywoJDy0YS2AwQ98I37610= golang.org/x/sync v0.10.0 h1:3NQrjDixjgGwUOCaF8w2+VYHv0Ve/vGYSbdkTa98gmQ=
golang.org/x/sync v0.13.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA= golang.org/x/sync v0.10.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
@@ -529,42 +549,43 @@ golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7w
golang.org/x/sys v0.0.0-20210616094352-59db8d763f22/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210616094352-59db8d763f22/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.32.0 h1:s77OFDvIQeibCmezSnk/q6iAfkdiQaJi4VzroCFrN20= golang.org/x/sys v0.28.0 h1:Fksou7UEQUWlKvIdsqzJmUmCX3cZuD2+P3XyyzwMhlA=
golang.org/x/sys v0.32.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= golang.org/x/sys v0.28.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw= golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw=
golang.org/x/term v0.31.0 h1:erwDkOK1Msy6offm1mOgvspSkslFnIGsFnxOKoufg3o= golang.org/x/term v0.27.0 h1:WP60Sv1nlK1T6SupCHbXzSaN0b9wUmsPoRS9b61A23Q=
golang.org/x/term v0.31.0/go.mod h1:R4BeIy7D95HzImkxGkTW1UQTtP54tio2RyHz7PwK0aw= golang.org/x/term v0.27.0/go.mod h1:iMsnZpn0cago0GOrHO2+Y7u7JPn5AylBrcoWkElMTSM=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.24.0 h1:dd5Bzh4yt5KYA8f9CJHCP4FB4D51c2c6JvN37xJJkJ0= golang.org/x/text v0.21.0 h1:zyQAAkrwaneQ066sspRyJaG9VNi/YJ1NfzcGB3hZ/qo=
golang.org/x/text v0.24.0/go.mod h1:L8rBsPeo2pSS+xqN0d5u2ikmjtmoJbDBT1b7nHvFCdU= golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ=
golang.org/x/time v0.11.0 h1:/bpjEDfN9tkoN/ryeYHnv5hcMlc8ncjMcM4XBk5NWV0= golang.org/x/time v0.6.0 h1:eTDhh4ZXt5Qf0augr54TN6suAUudPcawVZeIAPU7D4U=
golang.org/x/time v0.11.0/go.mod h1:CDIdPxbZBQxdj6cxyCIdrNogrJKMJ7pr37NYpMcMDSg= golang.org/x/time v0.6.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
golang.org/x/tools v0.32.0 h1:Q7N1vhpkQv7ybVzLFtTjvQya2ewbwNDZzUgfXGqtMWU= golang.org/x/tools v0.25.0 h1:oFU9pkj/iJgs+0DT+VMHrx+oBKs/LJMV+Uvg78sl+fE=
golang.org/x/tools v0.32.0/go.mod h1:ZxrU41P/wAbZD8EDa6dDCa6XfpkhJ7HFMjHJXfBDu8s= golang.org/x/tools v0.25.0/go.mod h1:/vtpO8WL1N9cQC3FN5zPqb//fRXskFHbLKk4OW1Q7rg=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM=
google.golang.org/genproto/googleapis/api v0.0.0-20241021214115-324edc3d5d38 h1:2oV8dfuIkM1Ti7DwXc0BJfnwr9csz4TDXI9EmiI+Rbw= google.golang.org/genproto/googleapis/api v0.0.0-20241007155032-5fefd90f89a9 h1:T6rh4haD3GVYsgEfWExoCZA2o2FmbNyKpTuAxbEFPTg=
google.golang.org/genproto/googleapis/api v0.0.0-20241021214115-324edc3d5d38/go.mod h1:vuAjtvlwkDKF6L1GQ0SokiRLCGFfeBUXWr/aFFkHACc= google.golang.org/genproto/googleapis/api v0.0.0-20241007155032-5fefd90f89a9/go.mod h1:wp2WsuBYj6j8wUdo3ToZsdxxixbvQNAHqVJrTgi5E5M=
google.golang.org/genproto/googleapis/rpc v0.0.0-20241021214115-324edc3d5d38 h1:zciRKQ4kBpFgpfC5QQCVtnnNAcLIqweL7plyZRQHVpI= google.golang.org/genproto/googleapis/rpc v0.0.0-20241021214115-324edc3d5d38 h1:zciRKQ4kBpFgpfC5QQCVtnnNAcLIqweL7plyZRQHVpI=
google.golang.org/genproto/googleapis/rpc v0.0.0-20241021214115-324edc3d5d38/go.mod h1:GX3210XPVPUjJbTUbvwI8f2IpZDMZuPJWDzDuebbviI= google.golang.org/genproto/googleapis/rpc v0.0.0-20241021214115-324edc3d5d38/go.mod h1:GX3210XPVPUjJbTUbvwI8f2IpZDMZuPJWDzDuebbviI=
google.golang.org/grpc v1.0.5/go.mod h1:yo6s7OP7yaDglbqo1J04qKzAhqBH6lvTonzMVmEdcZw= google.golang.org/grpc v1.0.5/go.mod h1:yo6s7OP7yaDglbqo1J04qKzAhqBH6lvTonzMVmEdcZw=
google.golang.org/grpc v1.69.4 h1:MF5TftSMkd8GLw/m0KM6V8CMOCY6NZ1NQDPGFgbTt4A= google.golang.org/grpc v1.68.1 h1:oI5oTa11+ng8r8XMMN7jAOmWfPZWbYpCFaMUTACxkM0=
google.golang.org/grpc v1.69.4/go.mod h1:vyjdE6jLBI76dgpDojsFGNaHlxdjXN9ghpnd2o7JGZ4= google.golang.org/grpc v1.68.1/go.mod h1:+q1XYFJjShcqn0QZHvCyeR4CXPA+llXIeUIfIe00waw=
google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.5.1 h1:F29+wU6Ee6qgu9TddPgooOdaqsxTMunOoj8KA5yuS5A= google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.5.1 h1:F29+wU6Ee6qgu9TddPgooOdaqsxTMunOoj8KA5yuS5A=
google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.5.1/go.mod h1:5KF+wpkbTSbGcR9zteSqZV6fqFOWBl4Yde8En8MryZA= google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.5.1/go.mod h1:5KF+wpkbTSbGcR9zteSqZV6fqFOWBl4Yde8En8MryZA=
google.golang.org/protobuf v1.35.2 h1:8Ar7bF+apOIoThw1EdZl0p1oWvMqTHmpA2fRTyZO8io= google.golang.org/protobuf v1.35.2 h1:8Ar7bF+apOIoThw1EdZl0p1oWvMqTHmpA2fRTyZO8io=
google.golang.org/protobuf v1.35.2/go.mod h1:9fA7Ob0pmnwhb644+1+CVWFRbNajQ6iRojtC/QF5bRE= google.golang.org/protobuf v1.35.2/go.mod h1:9fA7Ob0pmnwhb644+1+CVWFRbNajQ6iRojtC/QF5bRE=
gopkg.in/airbrake/gobrake.v2 v2.0.9/go.mod h1:/h5ZAUhDkGaJfjzjKLSjv6zCL6O0LLBxU4K+aSYdM/U= gopkg.in/airbrake/gobrake.v2 v2.0.9/go.mod h1:/h5ZAUhDkGaJfjzjKLSjv6zCL6O0LLBxU4K+aSYdM/U=
gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw= gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw=
gopkg.in/cenkalti/backoff.v2 v2.2.1 h1:eJ9UAg01/HIHG987TwxvnzK2MgxXq97YY6rYDpY9aII=
gopkg.in/cenkalti/backoff.v2 v2.2.1/go.mod h1:S0QdOvT2AlerfSBkp0O+dk+bbIMaNbEmVk876gPCthU= gopkg.in/cenkalti/backoff.v2 v2.2.1/go.mod h1:S0QdOvT2AlerfSBkp0O+dk+bbIMaNbEmVk876gPCthU=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
@@ -575,6 +596,7 @@ gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMy
gopkg.in/gemnasium/logrus-airbrake-hook.v2 v2.1.2/go.mod h1:Xk6kEKp8OKb+X14hQBKWaSkCsqBpgog8nAV2xsGOxlo= gopkg.in/gemnasium/logrus-airbrake-hook.v2 v2.1.2/go.mod h1:Xk6kEKp8OKb+X14hQBKWaSkCsqBpgog8nAV2xsGOxlo=
gopkg.in/inf.v0 v0.9.1 h1:73M5CoZyi3ZLMOyDlQh031Cx6N9NDJ2Vvfl76EDAgDc= gopkg.in/inf.v0 v0.9.1 h1:73M5CoZyi3ZLMOyDlQh031Cx6N9NDJ2Vvfl76EDAgDc=
gopkg.in/inf.v0 v0.9.1/go.mod h1:cWUDdTG/fYaXco+Dcufb5Vnc6Gp2YChqWtbxRZE0mXw= gopkg.in/inf.v0 v0.9.1/go.mod h1:cWUDdTG/fYaXco+Dcufb5Vnc6Gp2YChqWtbxRZE0mXw=
gopkg.in/rethinkdb/rethinkdb-go.v6 v6.2.1 h1:d4KQkxAaAiRY2h5Zqis161Pv91A37uZyJOx73duwUwM=
gopkg.in/rethinkdb/rethinkdb-go.v6 v6.2.1/go.mod h1:WbjuEoo1oadwzQ4apSDU+JTvmllEHtsNHS6y7vFc7iw= gopkg.in/rethinkdb/rethinkdb-go.v6 v6.2.1/go.mod h1:WbjuEoo1oadwzQ4apSDU+JTvmllEHtsNHS6y7vFc7iw=
gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw= gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw=
gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
@@ -606,3 +628,7 @@ sigs.k8s.io/structured-merge-diff/v4 v4.4.1 h1:150L+0vs/8DA78h1u02ooW1/fFq/Lwr+s
sigs.k8s.io/structured-merge-diff/v4 v4.4.1/go.mod h1:N8hJocpFajUSSeSJ9bOZ77VzejKZaXsTtZo4/u7Io08= sigs.k8s.io/structured-merge-diff/v4 v4.4.1/go.mod h1:N8hJocpFajUSSeSJ9bOZ77VzejKZaXsTtZo4/u7Io08=
sigs.k8s.io/yaml v1.4.0 h1:Mk1wCc2gy/F0THH0TAp1QYyJNzRm2KCLy3o5ASXVI5E= sigs.k8s.io/yaml v1.4.0 h1:Mk1wCc2gy/F0THH0TAp1QYyJNzRm2KCLy3o5ASXVI5E=
sigs.k8s.io/yaml v1.4.0/go.mod h1:Ejl7/uTz7PSA4eKMyQCUTnhZYNmLIl+5c2lQPGR2BPY= sigs.k8s.io/yaml v1.4.0/go.mod h1:Ejl7/uTz7PSA4eKMyQCUTnhZYNmLIl+5c2lQPGR2BPY=
tags.cncf.io/container-device-interface v0.8.0 h1:8bCFo/g9WODjWx3m6EYl3GfUG31eKJbaggyBDxEldRc=
tags.cncf.io/container-device-interface v0.8.0/go.mod h1:Apb7N4VdILW0EVdEMRYXIDVRZfNJZ+kmEUss2kRRQ6Y=
tags.cncf.io/container-device-interface/specs-go v0.8.0 h1:QYGFzGxvYK/ZLMrjhvY0RjpUavIn4KcmRmVP/JjdBTA=
tags.cncf.io/container-device-interface/specs-go v0.8.0/go.mod h1:BhJIkjjPh4qpys+qm4DAYtUyryaTDg9zris+AczXyws=

View File

@@ -9,13 +9,10 @@ Vagrant.configure("2") do |config|
config.vm.provision "init", type: "shell", run: "once" do |sh| config.vm.provision "init", type: "shell", run: "once" do |sh|
sh.inline = <<~SHELL sh.inline = <<~SHELL
set -x
pkg bootstrap pkg bootstrap
pkg install -y git pkg install -y go123 git
ln -s /usr/local/bin/go123 /usr/local/bin/go
fetch https://go.dev/dl/go#{ENV['GO_VERSION']}.freebsd-amd64.tar.gz go install gotest.tools/gotestsum@#{ENV['GOTESTSUM_VERSION']}
tar -C /usr/local -xzf go#{ENV['GO_VERSION']}.freebsd-amd64.tar.gz
ln -s /usr/local/go/bin/go /usr/local/bin/go
SHELL SHELL
end end
end end

View File

@@ -1,32 +0,0 @@
# -*- mode: ruby -*-
# vi: set ft=ruby :
Vagrant.configure("2") do |config|
config.vm.box = "generic/netbsd9"
config.vm.boot_timeout = 900
config.vm.synced_folder ".", "/vagrant", type: "rsync"
config.ssh.keep_alive = true
config.vm.provision "init", type: "shell", run: "once" do |sh|
sh.inline = <<~SHELL
set -x
mkdir -p /var/tmp
chmod 1777 /var/tmp
pkgin -y install git mozilla-rootcerts
mozilla-rootcerts install
ftp https://go.dev/dl/go#{ENV['GO_VERSION']}.netbsd-amd64.tar.gz
tar -C /var/tmp -xzf go#{ENV['GO_VERSION']}.netbsd-amd64.tar.gz
cat << 'EOF' > /usr/bin/go-wrapper
#!/bin/sh
export TMPDIR="/var/tmp"
exec /var/tmp/go/bin/go "$@"
EOF
chmod +x /usr/bin/go-wrapper
ln -s /usr/bin/go-wrapper /usr/bin/go
SHELL
end
end

View File

@@ -10,12 +10,12 @@ Vagrant.configure("2") do |config|
config.vm.provision "init", type: "shell", run: "once" do |sh| config.vm.provision "init", type: "shell", run: "once" do |sh|
sh.inline = <<~SHELL sh.inline = <<~SHELL
set -x
pkg_add -x git pkg_add -x git
ftp https://go.dev/dl/go#{ENV['GO_VERSION']}.openbsd-amd64.tar.gz ftp https://go.dev/dl/go1.23.3.openbsd-amd64.tar.gz
tar -C /usr/local -xzf go#{ENV['GO_VERSION']}.openbsd-amd64.tar.gz tar -C /usr/local -xzf go1.23.3.openbsd-amd64.tar.gz
ln -s /usr/local/go/bin/go /usr/local/bin/go ln -s /usr/local/go/bin/go /usr/local/bin/go
go install gotest.tools/gotestsum@#{ENV['GOTESTSUM_VERSION']}
SHELL SHELL
end end
end end

View File

@@ -5,10 +5,9 @@ ARG ALPINE_VERSION=3.21
ARG XX_VERSION=1.6.1 ARG XX_VERSION=1.6.1
ARG GOLANGCI_LINT_VERSION=1.62.0 ARG GOLANGCI_LINT_VERSION=1.62.0
# v0.31 requires go1.24 ARG GOPLS_VERSION=v0.26.0
ARG GOPLS_VERSION=v0.30.0
# disabled: deprecated unusedvariable simplifyrange # disabled: deprecated unusedvariable simplifyrange
ARG GOPLS_ANALYZERS="embeddirective fillreturns hostport infertypeargs modernize nonewvars noresultvalues simplifycompositelit simplifyslice unusedparams yield" ARG GOPLS_ANALYZERS="embeddirective fillreturns infertypeargs nonewvars norangeoverfunc noresultvalues simplifycompositelit simplifyslice undeclaredname unusedparams useany"
FROM --platform=$BUILDPLATFORM tonistiigi/xx:${XX_VERSION} AS xx FROM --platform=$BUILDPLATFORM tonistiigi/xx:${XX_VERSION} AS xx

View File

@@ -2,8 +2,6 @@
set -eu -o pipefail set -eu -o pipefail
: "${GITHUB_ACTIONS=}"
: "${BUILDX_CMD=docker buildx}" : "${BUILDX_CMD=docker buildx}"
: "${TEST_COVERAGE=}" : "${TEST_COVERAGE=}"
@@ -39,15 +37,7 @@ if [ "$TEST_COVERAGE" = "1" ]; then
export GO_TEST_COVERPROFILE="/testreports/coverage-report$TEST_REPORT_SUFFIX.txt" export GO_TEST_COVERPROFILE="/testreports/coverage-report$TEST_REPORT_SUFFIX.txt"
fi fi
dockerConfigMount="" cid=$(docker create --rm --privileged \
if [ "$GITHUB_ACTIONS" = "true" ]; then
dockerConfigPath="$HOME/.docker/config.json"
if [ -f "$dockerConfigPath" ]; then
dockerConfigMount="-v $dockerConfigPath:/root/.docker/config.json:ro"
fi
fi
cid=$(docker create --rm --privileged $dockerConfigMount \
-v /tmp $testReportsVol \ -v /tmp $testReportsVol \
--volumes-from=$cacheVolume \ --volumes-from=$cacheVolume \
-e GITHUB_REF \ -e GITHUB_REF \

View File

@@ -6,7 +6,6 @@ import (
"fmt" "fmt"
"os" "os"
"path/filepath" "path/filepath"
"strconv"
"sync" "sync"
"github.com/docker/buildx/util/confutil" "github.com/docker/buildx/util/confutil"
@@ -15,7 +14,6 @@ import (
) )
const ( const (
version = 2
refsDir = "refs" refsDir = "refs"
groupDir = "__group__" groupDir = "__group__"
) )
@@ -33,8 +31,12 @@ type State struct {
} }
type StateGroup struct { type StateGroup struct {
// Definition is the raw representation of the group (bake definition)
Definition []byte
// Targets are the targets invoked // Targets are the targets invoked
Targets []string `json:",omitempty"` Targets []string `json:",omitempty"`
// Inputs are the user inputs (bake overrides)
Inputs []string `json:",omitempty"`
// Refs are used to track all the refs that belong to the same group // Refs are used to track all the refs that belong to the same group
Refs []string Refs []string
} }
@@ -50,7 +52,9 @@ func New(cfg *confutil.Config) (*LocalState, error) {
if err := cfg.MkdirAll(refsDir, 0700); err != nil { if err := cfg.MkdirAll(refsDir, 0700); err != nil {
return nil, err return nil, err
} }
return &LocalState{cfg: cfg}, nil return &LocalState{
cfg: cfg,
}, nil
} }
func (ls *LocalState) ReadRef(builderName, nodeName, id string) (*State, error) { func (ls *LocalState) ReadRef(builderName, nodeName, id string) (*State, error) {
@@ -83,12 +87,8 @@ func (ls *LocalState) SaveRef(builderName, nodeName, id string, st State) error
return ls.cfg.AtomicWriteFile(filepath.Join(refDir, id), dt, 0644) return ls.cfg.AtomicWriteFile(filepath.Join(refDir, id), dt, 0644)
} }
func (ls *LocalState) GroupDir() string {
return filepath.Join(ls.cfg.Dir(), refsDir, groupDir)
}
func (ls *LocalState) ReadGroup(id string) (*StateGroup, error) { func (ls *LocalState) ReadGroup(id string) (*StateGroup, error) {
dt, err := os.ReadFile(filepath.Join(ls.GroupDir(), id)) dt, err := os.ReadFile(filepath.Join(ls.cfg.Dir(), refsDir, groupDir, id))
if err != nil { if err != nil {
return nil, err return nil, err
} }
@@ -208,7 +208,7 @@ func (ls *LocalState) removeGroup(id string) error {
if id == "" { if id == "" {
return errors.Errorf("group ref empty") return errors.Errorf("group ref empty")
} }
f := filepath.Join(ls.GroupDir(), id) f := filepath.Join(ls.cfg.Dir(), refsDir, groupDir, id)
if _, err := os.Lstat(f); err != nil { if _, err := os.Lstat(f); err != nil {
if !os.IsNotExist(err) { if !os.IsNotExist(err) {
return err return err
@@ -230,16 +230,3 @@ func (ls *LocalState) validate(builderName, nodeName, id string) error {
} }
return nil return nil
} }
func (ls *LocalState) readVersion() int {
if vdt, err := os.ReadFile(filepath.Join(ls.cfg.Dir(), refsDir, "version")); err == nil {
if v, err := strconv.Atoi(string(vdt)); err == nil {
return v
}
}
return 1
}
func (ls *LocalState) writeVersion(version int) error {
return ls.cfg.AtomicWriteFile(filepath.Join(refsDir, "version"), []byte(strconv.Itoa(version)), 0600)
}

View File

@@ -68,8 +68,10 @@ var (
testStateGroupID = "kvqs0sgly2rmitz84r25u9qd0" testStateGroupID = "kvqs0sgly2rmitz84r25u9qd0"
testStateGroup = StateGroup{ testStateGroup = StateGroup{
Targets: []string{"pre-checkin"}, Definition: []byte(`{"group":{"default":{"targets":["pre-checkin"]},"pre-checkin":{"targets":["vendor-update","format","build"]}},"target":{"build":{"context":".","dockerfile":"dev.Dockerfile","target":"build-update","platforms":["linux/amd64"],"output":["."]},"format":{"context":".","dockerfile":"dev.Dockerfile","target":"format-update","platforms":["linux/amd64"],"output":["."]},"vendor-update":{"context":".","dockerfile":"dev.Dockerfile","target":"vendor-update","platforms":["linux/amd64"],"output":["."]}}}`),
Refs: []string{"builder/builder0/hx2qf1w11qvz1x3k471c5i8xw", "builder/builder0/968zj0g03jmlx0s8qslnvh6rl", "builder/builder0/naf44f9i1710lf7y12lv5hb1z"}, Targets: []string{"pre-checkin"},
Inputs: []string{"*.platform=linux/amd64"},
Refs: []string{"builder/builder0/hx2qf1w11qvz1x3k471c5i8xw", "builder/builder0/968zj0g03jmlx0s8qslnvh6rl", "builder/builder0/naf44f9i1710lf7y12lv5hb1z"},
} }
testStateGroupRef1ID = "hx2qf1w11qvz1x3k471c5i8xw" testStateGroupRef1ID = "hx2qf1w11qvz1x3k471c5i8xw"

View File

@@ -1,56 +0,0 @@
package localstate
import (
"encoding/json"
"os"
"path/filepath"
"github.com/pkg/errors"
)
func (ls *LocalState) MigrateIfNeeded() error {
currentVersion := ls.readVersion()
if currentVersion == version {
return nil
}
migrations := map[int]func(*LocalState) error{
2: (*LocalState).migration2,
}
for v := currentVersion + 1; v <= version; v++ {
migration, found := migrations[v]
if !found {
return errors.Errorf("localstate migration v%d not found", v)
}
if err := migration(ls); err != nil {
return errors.Wrapf(err, "localstate migration v%d failed", v)
}
}
return ls.writeVersion(version)
}
func (ls *LocalState) migration2() error {
return filepath.Walk(ls.GroupDir(), func(path string, info os.FileInfo, err error) error {
if err != nil {
return err
}
if info.IsDir() {
return nil
}
dt, err := os.ReadFile(path)
if err != nil {
return err
}
var stg StateGroup
if err := json.Unmarshal(dt, &stg); err != nil {
return err
}
mdt, err := json.Marshal(stg)
if err != nil {
return err
}
if err := os.WriteFile(path, mdt, 0600); err != nil {
return err
}
return nil
})
}

View File

@@ -4,7 +4,6 @@ import (
"context" "context"
"fmt" "fmt"
"io" "io"
"slices"
"github.com/docker/buildx/monitor/types" "github.com/docker/buildx/monitor/types"
"github.com/pkg/errors" "github.com/pkg/errors"
@@ -51,7 +50,14 @@ func (cm *AttachCmd) Exec(ctx context.Context, args []string) error {
if err != nil { if err != nil {
return errors.Errorf("failed to get the list of sessions: %v", err) return errors.Errorf("failed to get the list of sessions: %v", err)
} }
if !slices.Contains(refs, ref) { found := false
for _, s := range refs {
if s == ref {
found = true
break
}
}
if !found {
return errors.Errorf("unknown ID: %q", ref) return errors.Errorf("unknown ID: %q", ref)
} }
cm.m.Detach() // Finish existing attach cm.m.Detach() // Finish existing attach

View File

@@ -66,7 +66,7 @@ func (cm *ReloadCmd) Exec(ctx context.Context, args []string) error {
if err != nil { if err != nil {
var be *controllererrors.BuildError var be *controllererrors.BuildError
if errors.As(err, &be) { if errors.As(err, &be) {
ref = be.SessionID ref = be.Ref
resultUpdated = true resultUpdated = true
} else { } else {
fmt.Printf("failed to reload: %v\n", err) fmt.Printf("failed to reload: %v\n", err)

View File

@@ -2,8 +2,6 @@ package store
import ( import (
"fmt" "fmt"
"maps"
"slices"
"time" "time"
"github.com/containerd/platforms" "github.com/containerd/platforms"
@@ -46,7 +44,7 @@ func (ng *NodeGroup) Leave(name string) error {
if len(ng.Nodes) == 1 { if len(ng.Nodes) == 1 {
return errors.Errorf("can not leave last node, do you want to rm instance instead?") return errors.Errorf("can not leave last node, do you want to rm instance instead?")
} }
ng.Nodes = slices.Delete(ng.Nodes, i, i+1) ng.Nodes = append(ng.Nodes[:i], ng.Nodes[i+1:]...)
return nil return nil
} }
@@ -94,7 +92,9 @@ func (ng *NodeGroup) Update(name, endpoint string, platforms []string, endpoints
needsRestart = true needsRestart = true
} }
if buildkitdConfigFile != "" { if buildkitdConfigFile != "" {
maps.Copy(n.Files, files) for k, v := range files {
n.Files[k] = v
}
needsRestart = true needsRestart = true
} }
if needsRestart { if needsRestart {
@@ -146,7 +146,9 @@ func (n *Node) Copy() *Node {
buildkitdFlags := []string{} buildkitdFlags := []string{}
copy(buildkitdFlags, n.BuildkitdFlags) copy(buildkitdFlags, n.BuildkitdFlags)
driverOpts := map[string]string{} driverOpts := map[string]string{}
maps.Copy(driverOpts, n.DriverOpts) for k, v := range n.DriverOpts {
driverOpts[k] = v
}
files := map[string][]byte{} files := map[string][]byte{}
for k, v := range n.Files { for k, v := range n.Files {
vv := []byte{} vv := []byte{}

View File

@@ -39,7 +39,7 @@ func ValidateName(s string) (string, error) {
func GenerateName(txn *Txn) (string, error) { func GenerateName(txn *Txn) (string, error) {
var name string var name string
for i := range 6 { for i := 0; i < 6; i++ {
name = namesgenerator.GetRandomName(i) name = namesgenerator.GetRandomName(i)
if _, err := txn.NodeGroupByName(name); err != nil { if _, err := txn.NodeGroupByName(name); err != nil {
if !os.IsNotExist(errors.Cause(err)) { if !os.IsNotExist(errors.Cause(err)) {

View File

@@ -17,7 +17,6 @@ import (
"github.com/containerd/continuity/fs/fstest" "github.com/containerd/continuity/fs/fstest"
"github.com/docker/buildx/bake" "github.com/docker/buildx/bake"
"github.com/docker/buildx/util/gitutil" "github.com/docker/buildx/util/gitutil"
"github.com/docker/buildx/util/gitutil/gittestutil"
"github.com/moby/buildkit/client" "github.com/moby/buildkit/client"
"github.com/moby/buildkit/frontend/subrequests/lint" "github.com/moby/buildkit/frontend/subrequests/lint"
"github.com/moby/buildkit/identity" "github.com/moby/buildkit/identity"
@@ -39,8 +38,6 @@ func bakeCmd(sb integration.Sandbox, opts ...cmdOpt) (string, error) {
var bakeTests = []func(t *testing.T, sb integration.Sandbox){ var bakeTests = []func(t *testing.T, sb integration.Sandbox){
testBakePrint, testBakePrint,
testBakePrintSensitive, testBakePrintSensitive,
testBakePrintOverrideEmpty,
testBakePrintKeepEscaped,
testBakeLocal, testBakeLocal,
testBakeLocalMulti, testBakeLocalMulti,
testBakeRemote, testBakeRemote,
@@ -289,107 +286,6 @@ RUN echo "Hello ${HELLO}"
} }
} }
func testBakePrintOverrideEmpty(t *testing.T, sb integration.Sandbox) {
dockerfile := []byte(`
FROM scratch
COPY foo /foo
`)
bakefile := []byte(`
target "default" {
cache-to = ["type=gha,mode=min,scope=integration-tests"]
}
`)
dir := tmpdir(
t,
fstest.CreateFile("docker-bake.hcl", bakefile, 0600),
fstest.CreateFile("Dockerfile", dockerfile, 0600),
fstest.CreateFile("foo", []byte("foo"), 0600),
)
cmd := buildxCmd(sb, withDir(dir), withArgs("bake", "--print", "--set", "*.cache-to="))
stdout := bytes.Buffer{}
stderr := bytes.Buffer{}
cmd.Stdout = &stdout
cmd.Stderr = &stderr
require.NoError(t, cmd.Run(), stdout.String(), stderr.String())
require.JSONEq(t, `{
"group": {
"default": {
"targets": [
"default"
]
}
},
"target": {
"default": {
"context": ".",
"dockerfile": "Dockerfile"
}
}
}`, stdout.String())
}
func testBakePrintKeepEscaped(t *testing.T, sb integration.Sandbox) {
bakefile := []byte(`
target "default" {
dockerfile-inline = <<EOT
ARG VERSION=latest
FROM alpine:$${VERSION}
EOT
args = {
VERSION = "3.21"
}
annotations = [
"org.opencontainers.image.authors=$${user}"
]
labels = {
foo = "hello %%{bar}"
}
}
`)
dir := tmpdir(t, fstest.CreateFile("docker-bake.hcl", bakefile, 0600))
cmd := buildxCmd(sb, withDir(dir), withArgs("bake", "--print"))
stdout := bytes.Buffer{}
stderr := bytes.Buffer{}
cmd.Stdout = &stdout
cmd.Stderr = &stderr
require.NoError(t, cmd.Run(), stdout.String(), stderr.String())
require.JSONEq(t, `{
"group": {
"default": {
"targets": [
"default"
]
}
},
"target": {
"default": {
"annotations": [
"org.opencontainers.image.authors=$${user}"
],
"context": ".",
"dockerfile": "Dockerfile",
"dockerfile-inline": "ARG VERSION=latest\nFROM alpine:$${VERSION}\n",
"args": {
"VERSION": "3.21"
},
"labels": {
"foo": "hello %%{bar}"
}
}
}
}`, stdout.String())
// test build with definition from print output
dir = tmpdir(t, fstest.CreateFile("docker-bake.json", stdout.Bytes(), 0600))
cmd = buildxCmd(sb, withDir(dir), withArgs("bake"))
out, err := cmd.CombinedOutput()
require.NoError(t, err, string(out))
}
func testBakeLocal(t *testing.T, sb integration.Sandbox) { func testBakeLocal(t *testing.T, sb integration.Sandbox) {
dockerfile := []byte(` dockerfile := []byte(`
FROM scratch FROM scratch
@@ -477,10 +373,10 @@ EOT
git, err := gitutil.New(gitutil.WithWorkingDir(dir)) git, err := gitutil.New(gitutil.WithWorkingDir(dir))
require.NoError(t, err) require.NoError(t, err)
gittestutil.GitInit(git, t) gitutil.GitInit(git, t)
gittestutil.GitAdd(git, t, "docker-bake.hcl", "foo") gitutil.GitAdd(git, t, "docker-bake.hcl", "foo")
gittestutil.GitCommit(git, t, "initial commit") gitutil.GitCommit(git, t, "initial commit")
addr := gittestutil.GitServeHTTP(git, t) addr := gitutil.GitServeHTTP(git, t)
out, err := bakeCmd(sb, withDir(dir), withArgs(addr, "--set", "*.output=type=local,dest="+dirDest)) out, err := bakeCmd(sb, withDir(dir), withArgs(addr, "--set", "*.output=type=local,dest="+dirDest))
require.NoError(t, err, out) require.NoError(t, err, out)
@@ -507,12 +403,12 @@ EOT
git, err := gitutil.New(gitutil.WithWorkingDir(dir)) git, err := gitutil.New(gitutil.WithWorkingDir(dir))
require.NoError(t, err) require.NoError(t, err)
gittestutil.GitInit(git, t) gitutil.GitInit(git, t)
gittestutil.GitAdd(git, t, "docker-bake.hcl", "foo") gitutil.GitAdd(git, t, "docker-bake.hcl", "foo")
gittestutil.GitCommit(git, t, "initial commit") gitutil.GitCommit(git, t, "initial commit")
token := identity.NewID() token := identity.NewID()
addr := gittestutil.GitServeHTTP(git, t, gittestutil.WithAccessToken(token)) addr := gitutil.GitServeHTTP(git, t, gitutil.WithAccessToken(token))
out, err := bakeCmd(sb, withDir(dir), out, err := bakeCmd(sb, withDir(dir),
withEnv("BUILDX_BAKE_GIT_AUTH_TOKEN="+token), withEnv("BUILDX_BAKE_GIT_AUTH_TOKEN="+token),
@@ -554,10 +450,10 @@ EOT
git, err := gitutil.New(gitutil.WithWorkingDir(dirSpec)) git, err := gitutil.New(gitutil.WithWorkingDir(dirSpec))
require.NoError(t, err) require.NoError(t, err)
gittestutil.GitInit(git, t) gitutil.GitInit(git, t)
gittestutil.GitAdd(git, t, "docker-bake.hcl", "bar") gitutil.GitAdd(git, t, "docker-bake.hcl", "bar")
gittestutil.GitCommit(git, t, "initial commit") gitutil.GitCommit(git, t, "initial commit")
addr := gittestutil.GitServeHTTP(git, t) addr := gitutil.GitServeHTTP(git, t)
out, err := bakeCmd(sb, withDir(dirSrc), withArgs(addr, "--file", "cwd://local-docker-bake.hcl", "--set", "*.output=type=local,dest="+dirDest)) out, err := bakeCmd(sb, withDir(dirSrc), withArgs(addr, "--file", "cwd://local-docker-bake.hcl", "--set", "*.output=type=local,dest="+dirDest))
require.NoError(t, err, out) require.NoError(t, err, out)
@@ -623,10 +519,10 @@ EOT
git, err := gitutil.New(gitutil.WithWorkingDir(dirSpec)) git, err := gitutil.New(gitutil.WithWorkingDir(dirSpec))
require.NoError(t, err) require.NoError(t, err)
gittestutil.GitInit(git, t) gitutil.GitInit(git, t)
gittestutil.GitAdd(git, t, "docker-bake.hcl") gitutil.GitAdd(git, t, "docker-bake.hcl")
gittestutil.GitCommit(git, t, "initial commit") gitutil.GitCommit(git, t, "initial commit")
addr := gittestutil.GitServeHTTP(git, t) addr := gitutil.GitServeHTTP(git, t)
out, err := bakeCmd(sb, withDir(dirSrc), withArgs(addr, "--set", "*.output=type=local,dest="+dirDest)) out, err := bakeCmd(sb, withDir(dirSrc), withArgs(addr, "--set", "*.output=type=local,dest="+dirDest))
require.NoError(t, err, out) require.NoError(t, err, out)
@@ -656,17 +552,17 @@ EOT
gitSpec, err := gitutil.New(gitutil.WithWorkingDir(dirSpec)) gitSpec, err := gitutil.New(gitutil.WithWorkingDir(dirSpec))
require.NoError(t, err) require.NoError(t, err)
gittestutil.GitInit(gitSpec, t) gitutil.GitInit(gitSpec, t)
gittestutil.GitAdd(gitSpec, t, "docker-bake.hcl") gitutil.GitAdd(gitSpec, t, "docker-bake.hcl")
gittestutil.GitCommit(gitSpec, t, "initial commit") gitutil.GitCommit(gitSpec, t, "initial commit")
addrSpec := gittestutil.GitServeHTTP(gitSpec, t) addrSpec := gitutil.GitServeHTTP(gitSpec, t)
gitSrc, err := gitutil.New(gitutil.WithWorkingDir(dirSrc)) gitSrc, err := gitutil.New(gitutil.WithWorkingDir(dirSrc))
require.NoError(t, err) require.NoError(t, err)
gittestutil.GitInit(gitSrc, t) gitutil.GitInit(gitSrc, t)
gittestutil.GitAdd(gitSrc, t, "foo") gitutil.GitAdd(gitSrc, t, "foo")
gittestutil.GitCommit(gitSrc, t, "initial commit") gitutil.GitCommit(gitSrc, t, "initial commit")
addrSrc := gittestutil.GitServeHTTP(gitSrc, t) addrSrc := gitutil.GitServeHTTP(gitSrc, t)
out, err := bakeCmd(sb, withDir("/tmp"), withArgs(addrSpec, addrSrc, "--set", "*.output=type=local,dest="+dirDest)) out, err := bakeCmd(sb, withDir("/tmp"), withArgs(addrSpec, addrSrc, "--set", "*.output=type=local,dest="+dirDest))
require.NoError(t, err, out) require.NoError(t, err, out)
@@ -697,10 +593,10 @@ COPY super-cool.txt /
git, err := gitutil.New(gitutil.WithWorkingDir(dir)) git, err := gitutil.New(gitutil.WithWorkingDir(dir))
require.NoError(t, err) require.NoError(t, err)
gittestutil.GitInit(git, t) gitutil.GitInit(git, t)
gittestutil.GitAdd(git, t, "docker-bake.hcl", "bar") gitutil.GitAdd(git, t, "docker-bake.hcl", "bar")
gittestutil.GitCommit(git, t, "initial commit") gitutil.GitCommit(git, t, "initial commit")
addr := gittestutil.GitServeHTTP(git, t) addr := gitutil.GitServeHTTP(git, t)
out, err := bakeCmd(sb, withDir("/tmp"), withArgs(addr, "--set", "*.output=type=local,dest="+dirDest)) out, err := bakeCmd(sb, withDir("/tmp"), withArgs(addr, "--set", "*.output=type=local,dest="+dirDest))
require.NoError(t, err, out) require.NoError(t, err, out)
@@ -738,10 +634,10 @@ EOT
git, err := gitutil.New(gitutil.WithWorkingDir(dirSpec)) git, err := gitutil.New(gitutil.WithWorkingDir(dirSpec))
require.NoError(t, err) require.NoError(t, err)
gittestutil.GitInit(git, t) gitutil.GitInit(git, t)
gittestutil.GitAdd(git, t, "docker-bake.hcl") gitutil.GitAdd(git, t, "docker-bake.hcl")
gittestutil.GitCommit(git, t, "initial commit") gitutil.GitCommit(git, t, "initial commit")
addr := gittestutil.GitServeHTTP(git, t) addr := gitutil.GitServeHTTP(git, t)
out, err := bakeCmd( out, err := bakeCmd(
sb, sb,
@@ -786,10 +682,10 @@ EOT
git, err := gitutil.New(gitutil.WithWorkingDir(dirSpec)) git, err := gitutil.New(gitutil.WithWorkingDir(dirSpec))
require.NoError(t, err) require.NoError(t, err)
gittestutil.GitInit(git, t) gitutil.GitInit(git, t)
gittestutil.GitAdd(git, t, "docker-bake.hcl") gitutil.GitAdd(git, t, "docker-bake.hcl")
gittestutil.GitCommit(git, t, "initial commit") gitutil.GitCommit(git, t, "initial commit")
addr := gittestutil.GitServeHTTP(git, t) addr := gitutil.GitServeHTTP(git, t)
out, err := bakeCmd( out, err := bakeCmd(
sb, sb,
@@ -842,13 +738,13 @@ COPY foo /foo
git, err := gitutil.New(gitutil.WithWorkingDir(dirSpec)) git, err := gitutil.New(gitutil.WithWorkingDir(dirSpec))
require.NoError(t, err) require.NoError(t, err)
gittestutil.GitInit(git, t) gitutil.GitInit(git, t)
gittestutil.GitAdd(git, t, "docker-bake.hcl") gitutil.GitAdd(git, t, "docker-bake.hcl")
gittestutil.GitAdd(git, t, "Dockerfile") gitutil.GitAdd(git, t, "Dockerfile")
gittestutil.GitAdd(git, t, "foo") gitutil.GitAdd(git, t, "foo")
gittestutil.GitAdd(git, t, "bar") gitutil.GitAdd(git, t, "bar")
gittestutil.GitCommit(git, t, "initial commit") gitutil.GitCommit(git, t, "initial commit")
addr := gittestutil.GitServeHTTP(git, t) addr := gitutil.GitServeHTTP(git, t)
out, err := bakeCmd( out, err := bakeCmd(
sb, sb,
@@ -894,10 +790,10 @@ COPY foo /foo
git, err := gitutil.New(gitutil.WithWorkingDir(dirSpec)) git, err := gitutil.New(gitutil.WithWorkingDir(dirSpec))
require.NoError(t, err) require.NoError(t, err)
gittestutil.GitInit(git, t) gitutil.GitInit(git, t)
gittestutil.GitAdd(git, t, "docker-bake.hcl") gitutil.GitAdd(git, t, "docker-bake.hcl")
gittestutil.GitCommit(git, t, "initial commit") gitutil.GitCommit(git, t, "initial commit")
addr := gittestutil.GitServeHTTP(git, t) addr := gitutil.GitServeHTTP(git, t)
out, err := bakeCmd( out, err := bakeCmd(
sb, sb,
@@ -975,7 +871,6 @@ target "default" {
}) })
} }
} }
func testBakeSetNonExistingOutsideNoParallel(t *testing.T, sb integration.Sandbox) { func testBakeSetNonExistingOutsideNoParallel(t *testing.T, sb integration.Sandbox) {
for _, ent := range []bool{true, false} { for _, ent := range []bool{true, false} {
t.Run(fmt.Sprintf("ent=%v", ent), func(t *testing.T) { t.Run(fmt.Sprintf("ent=%v", ent), func(t *testing.T) {
@@ -1078,11 +973,11 @@ FROM scratch
COPY foo /foo COPY foo /foo
`) `)
destDir := t.TempDir() destDir := t.TempDir()
bakefile := fmt.Appendf(nil, ` bakefile := []byte(fmt.Sprintf(`
target "default" { target "default" {
output = ["type=local,dest=%s/not/exists"] output = ["type=local,dest=%s/not/exists"]
} }
`, destDir) `, destDir))
dir := tmpdir( dir := tmpdir(
t, t,
fstest.CreateFile("docker-bake.hcl", bakefile, 0600), fstest.CreateFile("docker-bake.hcl", bakefile, 0600),
@@ -1112,11 +1007,11 @@ FROM scratch
COPY foo /foo COPY foo /foo
`) `)
destDir := t.TempDir() destDir := t.TempDir()
bakefile := fmt.Appendf(nil, ` bakefile := []byte(fmt.Sprintf(`
target "default" { target "default" {
output = ["type=local,dest=%s"] output = ["type=local,dest=%s"]
} }
`, destDir) `, destDir))
dir := tmpdir( dir := tmpdir(
t, t,
fstest.CreateFile("docker-bake.hcl", bakefile, 0600), fstest.CreateFile("docker-bake.hcl", bakefile, 0600),
@@ -1213,11 +1108,11 @@ COPY Dockerfile /foo
keyDir := t.TempDir() keyDir := t.TempDir()
err := writeTempPrivateKey(filepath.Join(keyDir, "id_rsa")) err := writeTempPrivateKey(filepath.Join(keyDir, "id_rsa"))
require.NoError(t, err) require.NoError(t, err)
bakefile := fmt.Appendf(nil, ` bakefile := []byte(fmt.Sprintf(`
target "default" { target "default" {
ssh = ["key=%s"] ssh = ["key=%s"]
} }
`, filepath.Join(keyDir, "id_rsa")) `, filepath.Join(keyDir, "id_rsa")))
dir := tmpdir( dir := tmpdir(
t, t,
fstest.CreateFile("docker-bake.hcl", bakefile, 0600), fstest.CreateFile("docker-bake.hcl", bakefile, 0600),
@@ -1376,8 +1271,8 @@ target "default" {
type mdT struct { type mdT struct {
Default struct { Default struct {
BuildRef string `json:"buildx.build.ref"` BuildRef string `json:"buildx.build.ref"`
BuildProvenance map[string]any `json:"buildx.build.provenance"` BuildProvenance map[string]interface{} `json:"buildx.build.provenance"`
} `json:"default"` } `json:"default"`
} }
var md mdT var md mdT

View File

@@ -19,7 +19,6 @@ import (
"github.com/docker/buildx/localstate" "github.com/docker/buildx/localstate"
"github.com/docker/buildx/util/confutil" "github.com/docker/buildx/util/confutil"
"github.com/docker/buildx/util/gitutil" "github.com/docker/buildx/util/gitutil"
"github.com/docker/buildx/util/gitutil/gittestutil"
"github.com/moby/buildkit/client" "github.com/moby/buildkit/client"
"github.com/moby/buildkit/frontend/subrequests/lint" "github.com/moby/buildkit/frontend/subrequests/lint"
"github.com/moby/buildkit/frontend/subrequests/outline" "github.com/moby/buildkit/frontend/subrequests/outline"
@@ -127,10 +126,10 @@ COPY foo /foo
git, err := gitutil.New(gitutil.WithWorkingDir(dir)) git, err := gitutil.New(gitutil.WithWorkingDir(dir))
require.NoError(t, err) require.NoError(t, err)
gittestutil.GitInit(git, t) gitutil.GitInit(git, t)
gittestutil.GitAdd(git, t, "Dockerfile", "foo") gitutil.GitAdd(git, t, "Dockerfile", "foo")
gittestutil.GitCommit(git, t, "initial commit") gitutil.GitCommit(git, t, "initial commit")
addr := gittestutil.GitServeHTTP(git, t) addr := gitutil.GitServeHTTP(git, t)
out, err := buildCmd(sb, withDir(dir), withArgs("--output=type=local,dest="+dirDest, addr)) out, err := buildCmd(sb, withDir(dir), withArgs("--output=type=local,dest="+dirDest, addr))
require.NoError(t, err, out) require.NoError(t, err, out)
@@ -239,10 +238,10 @@ COPY foo /foo
git, err := gitutil.New(gitutil.WithWorkingDir(dir)) git, err := gitutil.New(gitutil.WithWorkingDir(dir))
require.NoError(t, err) require.NoError(t, err)
gittestutil.GitInit(git, t) gitutil.GitInit(git, t)
gittestutil.GitAdd(git, t, "build.Dockerfile", "foo") gitutil.GitAdd(git, t, "build.Dockerfile", "foo")
gittestutil.GitCommit(git, t, "initial commit") gitutil.GitCommit(git, t, "initial commit")
addr := gittestutil.GitServeHTTP(git, t) addr := gitutil.GitServeHTTP(git, t)
out, err := buildCmd(sb, withDir(dir), withArgs( out, err := buildCmd(sb, withDir(dir), withArgs(
"-f", "build.Dockerfile", "-f", "build.Dockerfile",
@@ -805,8 +804,8 @@ func buildMetadataProvenance(t *testing.T, sb integration.Sandbox, metadataMode
require.NoError(t, err) require.NoError(t, err)
type mdT struct { type mdT struct {
BuildRef string `json:"buildx.build.ref"` BuildRef string `json:"buildx.build.ref"`
BuildProvenance map[string]any `json:"buildx.build.provenance"` BuildProvenance map[string]interface{} `json:"buildx.build.provenance"`
} }
var md mdT var md mdT
err = json.Unmarshal(dt, &md) err = json.Unmarshal(dt, &md)

View File

@@ -50,7 +50,7 @@ func withDir(dir string) cmdOpt {
func buildxCmd(sb integration.Sandbox, opts ...cmdOpt) *exec.Cmd { func buildxCmd(sb integration.Sandbox, opts ...cmdOpt) *exec.Cmd {
cmd := exec.Command("buildx") cmd := exec.Command("buildx")
cmd.Env = os.Environ() cmd.Env = append([]string{}, os.Environ()...)
for _, opt := range opts { for _, opt := range opts {
opt(cmd) opt(cmd)
} }
@@ -77,7 +77,7 @@ func buildxCmd(sb integration.Sandbox, opts ...cmdOpt) *exec.Cmd {
func dockerCmd(sb integration.Sandbox, opts ...cmdOpt) *exec.Cmd { func dockerCmd(sb integration.Sandbox, opts ...cmdOpt) *exec.Cmd {
cmd := exec.Command("docker") cmd := exec.Command("docker")
cmd.Env = os.Environ() cmd.Env = append([]string{}, os.Environ()...)
for _, opt := range opts { for _, opt := range opts {
opt(cmd) opt(cmd)
} }
@@ -214,7 +214,7 @@ func skipNoCompatBuildKit(t *testing.T, sb integration.Sandbox, constraint strin
} }
} }
func ptrstr(s any) *string { func ptrstr(s interface{}) *string {
var n *string var n *string
if reflect.ValueOf(s).Kind() == reflect.String { if reflect.ValueOf(s).Kind() == reflect.String {
ss := s.(string) ss := s.(string)

View File

@@ -45,7 +45,7 @@ func testRmMulti(t *testing.T, sb integration.Sandbox) {
} }
var builderNames []string var builderNames []string
for range 3 { for i := 0; i < 3; i++ {
out, err := createCmd(sb, withArgs("--driver", "docker-container")) out, err := createCmd(sb, withArgs("--driver", "docker-container"))
require.NoError(t, err, out) require.NoError(t, err, out)
builderName := strings.TrimSpace(out) builderName := strings.TrimSpace(out)

View File

@@ -2,7 +2,6 @@ package workers
import ( import (
"os" "os"
"slices"
"strings" "strings"
"github.com/moby/buildkit/util/testutil/integration" "github.com/moby/buildkit/util/testutil/integration"
@@ -50,14 +49,23 @@ func (s *backend) ExtraEnv() []string {
func (s backend) Supports(feature string) bool { func (s backend) Supports(feature string) bool {
if enabledFeatures := os.Getenv("BUILDKIT_TEST_ENABLE_FEATURES"); enabledFeatures != "" { if enabledFeatures := os.Getenv("BUILDKIT_TEST_ENABLE_FEATURES"); enabledFeatures != "" {
if slices.Contains(strings.Split(enabledFeatures, ","), feature) { for _, enabledFeature := range strings.Split(enabledFeatures, ",") {
return true if feature == enabledFeature {
return true
}
} }
} }
if disabledFeatures := os.Getenv("BUILDKIT_TEST_DISABLE_FEATURES"); disabledFeatures != "" { if disabledFeatures := os.Getenv("BUILDKIT_TEST_DISABLE_FEATURES"); disabledFeatures != "" {
if slices.Contains(strings.Split(disabledFeatures, ","), feature) { for _, disabledFeature := range strings.Split(disabledFeatures, ",") {
if feature == disabledFeature {
return false
}
}
}
for _, unsupportedFeature := range s.unsupportedFeatures {
if feature == unsupportedFeature {
return false return false
} }
} }
return !slices.Contains(s.unsupportedFeatures, feature) return true
} }

View File

@@ -90,7 +90,7 @@ func (a *Attest) ToPB() *controllerapi.Attest {
} }
func (a *Attest) MarshalJSON() ([]byte, error) { func (a *Attest) MarshalJSON() ([]byte, error) {
m := make(map[string]any, len(a.Attrs)+2) m := make(map[string]interface{}, len(a.Attrs)+2)
for k, v := range a.Attrs { for k, v := range a.Attrs {
m[k] = v m[k] = v
} }
@@ -102,7 +102,7 @@ func (a *Attest) MarshalJSON() ([]byte, error) {
} }
func (a *Attest) UnmarshalJSON(data []byte) error { func (a *Attest) UnmarshalJSON(data []byte) error {
var m map[string]any var m map[string]interface{}
if err := json.Unmarshal(data, &m); err != nil { if err := json.Unmarshal(data, &m); err != nil {
return err return err
} }
@@ -148,8 +148,9 @@ func (a *Attest) UnmarshalText(text []byte) error {
if !ok { if !ok {
return errors.Errorf("invalid value %s", field) return errors.Errorf("invalid value %s", field)
} }
key = strings.TrimSpace(strings.ToLower(key))
switch strings.TrimSpace(strings.ToLower(key)) { switch key {
case "type": case "type":
a.Type = value a.Type = value
case "disabled": case "disabled":
@@ -202,7 +203,7 @@ func ParseAttests(in []string) ([]*controllerapi.Attest, error) {
func ConvertAttests(in []*Attest) ([]*controllerapi.Attest, error) { func ConvertAttests(in []*Attest) ([]*controllerapi.Attest, error) {
out := make([]*controllerapi.Attest, 0, len(in)) out := make([]*controllerapi.Attest, 0, len(in))
// Check for duplicate attestations while we convert them // Check for dupplicate attestations while we convert them
// to the controller API. // to the controller API.
found := map[string]struct{}{} found := map[string]struct{}{}
for _, attest := range in { for _, attest := range in {

View File

@@ -22,19 +22,18 @@ func (e *Attests) FromCtyValue(in cty.Value, p cty.Path) error {
return p.NewErrorf("%s", convert.MismatchMessage(got, want)) return p.NewErrorf("%s", convert.MismatchMessage(got, want))
} }
func (e *Attests) fromCtyValue(in cty.Value, p cty.Path) (retErr error) { func (e *Attests) fromCtyValue(in cty.Value, p cty.Path) error {
*e = make([]*Attest, 0, in.LengthInt()) *e = make([]*Attest, 0, in.LengthInt())
for elem := in.ElementIterator(); elem.Next(); {
_, value := elem.Element()
yield := func(value cty.Value) bool {
entry := &Attest{} entry := &Attest{}
if retErr = entry.FromCtyValue(value, p); retErr != nil { if err := entry.FromCtyValue(value, p); err != nil {
return false return err
} }
*e = append(*e, entry) *e = append(*e, entry)
return true
} }
eachElement(in)(yield) return nil
return retErr
} }
func (e Attests) ToCtyValue() cty.Value { func (e Attests) ToCtyValue() cty.Value {
@@ -65,10 +64,6 @@ func (e *Attest) FromCtyValue(in cty.Value, p cty.Path) error {
e.Attrs = map[string]string{} e.Attrs = map[string]string{}
for it := conv.ElementIterator(); it.Next(); { for it := conv.ElementIterator(); it.Next(); {
k, v := it.Element() k, v := it.Element()
if !v.IsKnown() {
continue
}
switch key := k.AsString(); key { switch key := k.AsString(); key {
case "type": case "type":
e.Type = v.AsString() e.Type = v.AsString()

View File

@@ -13,21 +13,16 @@ func TestAttests(t *testing.T) {
attests := Attests{ attests := Attests{
{Type: "provenance", Attrs: map[string]string{"mode": "max"}}, {Type: "provenance", Attrs: map[string]string{"mode": "max"}},
{Type: "sbom", Disabled: true}, {Type: "sbom", Disabled: true},
{Type: "sbom", Attrs: map[string]string{
"generator": "scanner",
"ENV1": `"foo,bar"`,
"Env2": "hello",
}},
} }
expected := `[{"type":"provenance","mode":"max"},{"type":"sbom","disabled":true},{"ENV1":"\"foo,bar\"","Env2":"hello","generator":"scanner","type":"sbom"}]` expected := `[{"type":"provenance","mode":"max"},{"type":"sbom","disabled":true}]`
actual, err := json.Marshal(attests) actual, err := json.Marshal(attests)
require.NoError(t, err) require.NoError(t, err)
require.JSONEq(t, expected, string(actual)) require.JSONEq(t, expected, string(actual))
}) })
t.Run("UnmarshalJSON", func(t *testing.T) { t.Run("UnmarshalJSON", func(t *testing.T) {
in := `[{"type":"provenance","mode":"max"},{"type":"sbom","disabled":true},{"ENV1":"\"foo,bar\"","Env2":"hello","generator":"scanner","type":"sbom"}]` in := `[{"type":"provenance","mode":"max"},{"type":"sbom","disabled":true}]`
var actual Attests var actual Attests
err := json.Unmarshal([]byte(in), &actual) err := json.Unmarshal([]byte(in), &actual)
@@ -36,11 +31,6 @@ func TestAttests(t *testing.T) {
expected := Attests{ expected := Attests{
{Type: "provenance", Attrs: map[string]string{"mode": "max"}}, {Type: "provenance", Attrs: map[string]string{"mode": "max"}},
{Type: "sbom", Disabled: true, Attrs: map[string]string{}}, {Type: "sbom", Disabled: true, Attrs: map[string]string{}},
{Type: "sbom", Disabled: false, Attrs: map[string]string{
"generator": "scanner",
"ENV1": `"foo,bar"`,
"Env2": "hello",
}},
} }
require.Equal(t, expected, actual) require.Equal(t, expected, actual)
}) })
@@ -51,14 +41,7 @@ func TestAttests(t *testing.T) {
"type": cty.StringVal("provenance"), "type": cty.StringVal("provenance"),
"mode": cty.StringVal("max"), "mode": cty.StringVal("max"),
}), }),
cty.ObjectVal(map[string]cty.Value{
"type": cty.StringVal("sbom"),
"generator": cty.StringVal("scan"),
"ENV1": cty.StringVal(`foo,bar`),
"Env2": cty.StringVal(`hello`),
}),
cty.StringVal("type=sbom,disabled=true"), cty.StringVal("type=sbom,disabled=true"),
cty.StringVal(`type=sbom,generator=scan,"FOO=bar,baz",Hello=World`),
}) })
var actual Attests var actual Attests
@@ -67,17 +50,7 @@ func TestAttests(t *testing.T) {
expected := Attests{ expected := Attests{
{Type: "provenance", Attrs: map[string]string{"mode": "max"}}, {Type: "provenance", Attrs: map[string]string{"mode": "max"}},
{Type: "sbom", Attrs: map[string]string{
"generator": "scan",
"ENV1": "foo,bar",
"Env2": "hello",
}},
{Type: "sbom", Disabled: true, Attrs: map[string]string{}}, {Type: "sbom", Disabled: true, Attrs: map[string]string{}},
{Type: "sbom", Attrs: map[string]string{
"generator": "scan",
"FOO": "bar,baz",
"Hello": "World",
}},
} }
require.Equal(t, expected, actual) require.Equal(t, expected, actual)
}) })
@@ -86,11 +59,6 @@ func TestAttests(t *testing.T) {
attests := Attests{ attests := Attests{
{Type: "provenance", Attrs: map[string]string{"mode": "max"}}, {Type: "provenance", Attrs: map[string]string{"mode": "max"}},
{Type: "sbom", Disabled: true}, {Type: "sbom", Disabled: true},
{Type: "sbom", Attrs: map[string]string{
"generator": "scan",
"ENV1": `"foo,bar"`,
"Env2": "hello",
}},
} }
actual := attests.ToCtyValue() actual := attests.ToCtyValue()
@@ -103,12 +71,6 @@ func TestAttests(t *testing.T) {
"type": cty.StringVal("sbom"), "type": cty.StringVal("sbom"),
"disabled": cty.StringVal("true"), "disabled": cty.StringVal("true"),
}), }),
cty.MapVal(map[string]cty.Value{
"type": cty.StringVal("sbom"),
"generator": cty.StringVal("scan"),
"ENV1": cty.StringVal(`"foo,bar"`),
"Env2": cty.StringVal("hello"),
}),
}) })
result := actual.Equals(expected) result := actual.Equals(expected)

View File

@@ -5,7 +5,6 @@ import (
"encoding/json" "encoding/json"
"maps" "maps"
"os" "os"
"strconv"
"strings" "strings"
awsconfig "github.com/aws/aws-sdk-go-v2/config" awsconfig "github.com/aws/aws-sdk-go-v2/config"
@@ -150,7 +149,7 @@ func (e *CacheOptionsEntry) UnmarshalText(text []byte) error {
return e.validate(text) return e.validate(text)
} }
func (e *CacheOptionsEntry) validate(gv any) error { func (e *CacheOptionsEntry) validate(gv interface{}) error {
if e.Type == "" { if e.Type == "" {
var text []byte var text []byte
switch gv := gv.(type) { switch gv := gv.(type) {
@@ -175,10 +174,6 @@ func ParseCacheEntry(in []string) (CacheOptions, error) {
opts := make(CacheOptions, 0, len(in)) opts := make(CacheOptions, 0, len(in))
for _, in := range in { for _, in := range in {
if in == "" {
continue
}
if !strings.Contains(in, "=") { if !strings.Contains(in, "=") {
// This is ref only format. Each field in the CSV is its own entry. // This is ref only format. Each field in the CSV is its own entry.
fields, err := csvvalue.Fields(in, nil) fields, err := csvvalue.Fields(in, nil)
@@ -209,32 +204,14 @@ func addGithubToken(ci *controllerapi.CacheOptionsEntry) {
if ci.Type != "gha" { if ci.Type != "gha" {
return return
} }
version, ok := ci.Attrs["version"]
if !ok {
// https://github.com/actions/toolkit/blob/2b08dc18f261b9fdd978b70279b85cbef81af8bc/packages/cache/src/internal/config.ts#L19
if v, ok := os.LookupEnv("ACTIONS_CACHE_SERVICE_V2"); ok {
if b, err := strconv.ParseBool(v); err == nil && b {
version = "2"
}
}
}
if _, ok := ci.Attrs["token"]; !ok { if _, ok := ci.Attrs["token"]; !ok {
if v, ok := os.LookupEnv("ACTIONS_RUNTIME_TOKEN"); ok { if v, ok := os.LookupEnv("ACTIONS_RUNTIME_TOKEN"); ok {
ci.Attrs["token"] = v ci.Attrs["token"] = v
} }
} }
if _, ok := ci.Attrs["url_v2"]; !ok && version == "2" {
// https://github.com/actions/toolkit/blob/2b08dc18f261b9fdd978b70279b85cbef81af8bc/packages/cache/src/internal/config.ts#L34-L35
if v, ok := os.LookupEnv("ACTIONS_RESULTS_URL"); ok {
ci.Attrs["url_v2"] = v
}
}
if _, ok := ci.Attrs["url"]; !ok { if _, ok := ci.Attrs["url"]; !ok {
// https://github.com/actions/toolkit/blob/2b08dc18f261b9fdd978b70279b85cbef81af8bc/packages/cache/src/internal/config.ts#L28-L33
if v, ok := os.LookupEnv("ACTIONS_CACHE_URL"); ok { if v, ok := os.LookupEnv("ACTIONS_CACHE_URL"); ok {
ci.Attrs["url"] = v ci.Attrs["url"] = v
} else if v, ok := os.LookupEnv("ACTIONS_RESULTS_URL"); ok {
ci.Attrs["url"] = v
} }
} }
} }
@@ -274,5 +251,5 @@ func isActive(pb *controllerapi.CacheOptionsEntry) bool {
if pb.Type != "gha" { if pb.Type != "gha" {
return true return true
} }
return pb.Attrs["token"] != "" && (pb.Attrs["url"] != "" || pb.Attrs["url_v2"] != "") return pb.Attrs["token"] != "" && pb.Attrs["url"] != ""
} }

Some files were not shown because too many files have changed in this diff Show More